#
CS587 - Assignment 5 (Part II)
##
Tracking Issues created for Repos on GitHub
##
Plot and Forecast Issues
### Deliverables: - Submit a single ZIP file for both Part 1 & Part 2 that has the following items: 1. Your PDF document for the analysis report. 2. Your IPYNB script with the name GitHub_Repos_Issues_Forecasting.ipynb that has your source code and output for the requirements listed 3. Your HTML/PDF document that has your source code and output for the requirements listed


## REQUIREMENT 1 Use Python/GitHub API to retrieve Issues/Repos information of the past 3 years for the following repositories: - https://github.com/angular/angular - https://github.com/angular/material - https://github.com/angular/angular-cli - https://github.com/SebastianM/angular-google-maps - https://github.com/d3/d3
In [ ]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
import github3
import json
# Get an API key for GitHub and set it as GITHUB_TOKEN
# Here is the URL to guide you on how to generate your GITHUB_TOKEN
# https://help.github.com/articles/creating-an-access-token-for-command-line-use/

# create your GitHub personal access tokens from https://github.com/settings/tokens

import os

# SECURITY FIX: a real personal access token was committed here in plain text.
# Never hard-code credentials — read the token from the environment instead
# (create one at https://github.com/settings/tokens and `export GITHUB_TOKEN=...`).
GITHUB_TOKEN = os.environ.get('GITHUB_TOKEN', 'ADD-YOUR-GitHub-Personal-Token-HERE')
gh = github3.login(token=GITHUB_TOKEN)
from tqdm import tqdm
import dateutil
import datetime
import time
def getRepoIssue(repo):
    """Download the last 36 months of issues for one repository.

    Parameters
    ----------
    repo : tuple[str, str]
        (organization, repository-name), e.g. ('angular', 'angular').

    Writes one JSON object per line to './<org><name>issues.json' with keys:
    issue_number, created_at, closed_at (None while open), labels, State, Author.
    """
    print(repo)
    org, name = repo
    filename_issues = org + name + 'issues.json'
    today = datetime.date.today()
    # 'with' guarantees the output file is closed even if the API raises.
    with open('./{}'.format(filename_issues), 'w') as out_file:
        for _ in tqdm(range(36)):  # walk backwards one month at a time, 36 months
            last_month = today + dateutil.relativedelta.relativedelta(months=-1)
            # BUG FIX: the query used to hard-code 'repo:angular/angular', so
            # every repository in the list re-downloaded angular/angular's issues.
            search_query = 'type:issue repo:{}/{} created:{}..{}'.format(
                org, name, last_month, today)
            for issue in gh.search_issues(search_query):
                current_issue = json.loads(issue.as_json())
                data = {
                    'issue_number': current_issue['number'],
                    'created_at': current_issue['created_at'][0:10],
                    # closed_at is None while the issue is still open
                    'closed_at': (None if current_issue['closed_at'] is None
                                  else current_issue['closed_at'][0:10]),
                    'labels': [label['name'] for label in current_issue['labels']],
                    'State': current_issue['state'],
                    'Author': current_issue['user']['login'],
                }
                out_file.write(json.dumps(data) + '\n')
            # BUG FIX: the window advance used to live inside the per-issue loop,
            # so a month with zero issues never advanced `today` and the function
            # re-queried the same range forever.  The sleep was also per-issue
            # (10s x thousands of issues); one pause per month is enough to stay
            # under the GitHub search rate limit.
            today = last_month
            time.sleep(10)
    print('Done')
    
# (organization, repository) pairs whose issue history we download.
repolist = [
    ('angular', 'angular'),
    ('angular', 'material'),
    ('angular', 'angular-cli'),
    ('SebastianM', 'angular-google-maps'),
    ('d3', 'd3'),
]

for repo in repolist:
    getRepoIssue(repo)
In [ ]:
import os
In [459]:
# Load every per-repo JSON-lines file under ./issues into a single DataFrame,
# tagging each row with the repo it came from (filename without extension).
repos = [x for x in os.listdir('./issues') if x[0] != '.']
frames = []
for repo in repos:
    # 'with' closes each file handle; the original left them dangling.
    with open('./issues/{}'.format(repo)) as fh:
        issues_df = pd.DataFrame([json.loads(line) for line in fh])
    issues_df['repo'] = repo.split('.')[0]
    frames.append(issues_df)
# FIX: DataFrame.append was deprecated and removed in pandas 2.0, and calling
# it in a loop is quadratic — concatenate once instead.
dfs = pd.concat(frames, ignore_index=True) if frames else pd.DataFrame()
In [ ]:
# Silence all warnings for notebook readability (mainly pandas
# SettingWithCopy / deprecation noise from the cells below).
# NOTE(review): a blanket 'ignore' also hides genuine problems — consider
# narrowing to specific warning categories.
import warnings
warnings.filterwarnings('ignore')

## REQUIREMENT 2 - A line chart to plot the issues for every repository
In [461]:
# REQ 2: line chart of the total issue count per repository.
dfs.groupby(['repo']).count()[['issue_number']].plot(figsize=(25,10))
Out[461]:
<AxesSubplot:xlabel='repo'>

## REQUIREMENT 3 - A bar chart to plot the issues created for every month for every repository
In [462]:
# Calendar month (1-12) extracted from the ISO 'YYYY-MM-DD' created date.
dfs['create_month'] = dfs['created_at'].map(lambda day: int(day[5:7]))
In [463]:
# REQ 3: grouped bar chart — issues created per calendar month, one bar per repo.
dfs.groupby(['create_month','repo']).create_month.count().unstack().plot(kind='bar',stacked=False,figsize=(25,10))
Out[463]:
<AxesSubplot:xlabel='create_month'>


## REQUIREMENT 4 - A bar chart to plot the stars for every repository
In [ ]:
 


## REQUIREMENT 5 - A bar chart to plot the forks for every repository


## REQUIREMENT 6 - A bar chart to plot the issues closed for every week for every repository
In [464]:
# Keep only issues that have actually been closed.
# FIX: .copy() makes dfs6 an independent frame — later cells add a
# 'closed_weekday' column, which on a bare slice triggers
# SettingWithCopyWarning and may silently fail to write.
dfs6 = dfs[dfs['closed_at'].notna()].copy()
In [465]:
# NOTE: this shadows the module-level `import datetime` from the cell above.
from datetime import datetime

# Weekday index of each close date: Monday=0 ... Sunday=6.
dfs6['closed_weekday'] = dfs6['closed_at'].map(
    lambda day: datetime.strptime(day, '%Y-%m-%d').weekday())
In [466]:
# REQ 6: grouped bar chart — issues closed per weekday (Mon=0..Sun=6), one bar per repo.
dfs6.groupby(['closed_weekday','repo']).closed_weekday.count()\
.unstack().plot(kind='bar',stacked=False,figsize=(25,10))
Out[466]:
<AxesSubplot:xlabel='closed_weekday'>


## REQUIREMENT 7 - A stack-bar Chart to plot the created and closed issues for every repository
In [ ]:
#Add your code for requirement 7 in this cell
In [467]:
# REQ 7: stacked bar chart per repo — non-null closed_at count stacked on created_at count.
dfs.groupby(['repo']).count()[['closed_at','created_at']].plot(kind='bar',stacked=True,figsize=(25,10))
Out[467]:
<AxesSubplot:xlabel='repo'>


## REQUIREMENT 8 (FACEBOOK/PROPHET) - Use Facebook/Prophet package to forecast the following for every repository
In [ ]:
import prophet
from prophet import Prophet
#### REQ8-1). The day of the week maximum number of issues created
In [470]:
def r1(reponame):
    """Forecast (Prophet, 1 year ahead) the weekly maximum of issues
    created per day for one repository, and plot the forecast."""
    df = dfs[dfs['repo'] == reponame]
    # describe() on the grouped column yields one row per date where
    # 'top' is the date string itself and 'count' is issues created that day.
    df_new = df.groupby('created_at')['created_at'].describe()
    # Keep a second copy of the date so it survives the weekly grouping below.
    df_new = pd.concat([df_new, pd.Series(df_new['top'], name='ds_orig')], axis=1)
    # FIX: removed dead code — a placeholder DataFrame and a bare `pdf`
    # expression that were immediately discarded.
    pdf = df_new[['top', 'ds_orig', 'count']]
    pdf.columns = ['ds', 'ds_orig', 'y']
    # Shift one week back so the W-MON buckets match the original notebook.
    pdf['ds_new'] = pd.to_datetime(pdf['ds']) - pd.to_timedelta(7, unit='d')
    # For every week keep the day with the maximum created count.
    df_weekly_max = (pdf.reset_index()
                        .groupby([pd.Grouper(key='ds_new', freq='W-MON')])
                        .apply(lambda x: x.loc[x.y == x.y.max(), ['ds_orig', 'y']]))
    df_final = df_weekly_max[['ds_orig', 'y']]
    df_final.columns = ['ds', 'y']  # Prophet requires columns named ds / y
    m = Prophet()
    m.fit(df_final)
    future = m.make_future_dataframe(periods=365)  # forecast 1 year ahead
    forecast = m.predict(future)
    m.plot(forecast)
In [471]:
# Run the created-issues weekly-max forecast for the five repositories.
_repos = dfs['repo'].unique()
for idx in range(5):
    r1(_repos[idx])
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
#### REQ8-2). The day of the week maximum number of issues closed
In [472]:
def r2(reponame):
    """Forecast (Prophet, 1 year ahead) the weekly maximum of issues
    closed per day for one repository, and plot the forecast."""
    df = dfs6[dfs6['repo'] == reponame]
    # describe() on the grouped column yields one row per date where
    # 'top' is the date string itself and 'count' is issues closed that day.
    df_new = df.groupby('closed_at')['closed_at'].describe()
    df_new = pd.concat([df_new, pd.Series(df_new['top'], name='ds_orig')], axis=1)
    # FIX: removed the dead placeholder DataFrame that was immediately overwritten.
    pdf = df_new[['top', 'ds_orig', 'count']]
    pdf.columns = ['ds', 'ds_orig', 'y']
    pdf['ds_new'] = pd.to_datetime(pdf['ds']) - pd.to_timedelta(7, unit='d')
    # For every W-MON week keep the day with the maximum closed count.
    df_weekly_max = (pdf.reset_index()
                        .groupby([pd.Grouper(key='ds_new', freq='W-MON')])
                        .apply(lambda x: x.loc[x.y == x.y.max(), ['ds_orig', 'y']]))
    df_final = df_weekly_max[['ds_orig', 'y']]
    df_final.columns = ['ds', 'y']  # Prophet requires columns named ds / y
    m = Prophet()
    m.fit(df_final)
    future = m.make_future_dataframe(periods=365)  # forecast 1 year ahead
    forecast = m.predict(future)
    m.plot(forecast)
In [474]:
# Run the closed-issues weekly-max forecast for the five repositories.
_repos = dfs['repo'].unique()
for idx in range(5):
    print(_repos[idx])
    r2(_repos[idx])
angular-cliissues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
angular-google-mapsissues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
materialissues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
d3issues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
angularissues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
In [ ]:
 
#### REQ8-3). The month of the year that has maximum number of issues closed
In [ ]:
 
In [475]:
def r3(reponame):
    """Forecast (Prophet, 12 periods ahead) the month of the year with the
    maximum number of issues closed for one repository, and plot it."""
    # FIX: .copy() — the next line rewrites 'closed_at'; without a copy we
    # mutate a view of dfs6 (SettingWithCopyWarning / possible silent no-op,
    # and it would corrupt dfs6 for every later cell).
    df = dfs6[dfs6['repo'] == reponame].copy()
    # Truncate each date to the first of its month: 'YYYY-MM-DD' -> 'YYYY-MM-01'.
    df['closed_at'] = df['closed_at'].map(lambda x: '-'.join(x.split('-')[:-1] + ['01']))
    # One row per month; 'top' is the month string, 'count' is issues closed in it.
    df_new = df.groupby('closed_at')['closed_at'].describe()
    df_new = pd.concat([df_new, pd.Series(df_new['top'], name='ds_orig')], axis=1)
    pdf = df_new[['top', 'ds_orig', 'count']]
    pdf.columns = ['ds', 'ds_orig', 'y']
    pdf['ds_new'] = pd.to_datetime(pdf['ds'])
    # For every calendar year keep the month with the maximum closed count.
    df_yearly_max = (pdf.reset_index()
                        .groupby([pd.Grouper(key='ds_new', freq='Y')])
                        .apply(lambda x: x.loc[x.y == x.y.max(), ['ds_orig', 'y']]))
    df_final = df_yearly_max[['ds_orig', 'y']]
    df_final.columns = ['ds', 'y']  # Prophet requires columns named ds / y
    m = Prophet()
    m.fit(df_final)
    future = m.make_future_dataframe(periods=12)  # forecast 12 periods ahead
    forecast = m.predict(future)
    m.plot(forecast)
In [476]:
# Run the busiest-closing-month forecast for the five repositories.
_repos = dfs['repo'].unique()
for idx in range(5):
    print(_repos[idx])
    r3(_repos[idx])
INFO:prophet:Disabling weekly seasonality. Run prophet with weekly_seasonality=True to override this.
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
angular-cliissues
INFO:prophet:n_changepoints greater than number of observations. Using 2.
INFO:prophet:Disabling weekly seasonality. Run prophet with weekly_seasonality=True to override this.
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
INFO:prophet:n_changepoints greater than number of observations. Using 3.
angular-google-mapsissues
INFO:prophet:Disabling weekly seasonality. Run prophet with weekly_seasonality=True to override this.
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
INFO:prophet:n_changepoints greater than number of observations. Using 3.
materialissues
INFO:prophet:Disabling weekly seasonality. Run prophet with weekly_seasonality=True to override this.
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
INFO:prophet:n_changepoints greater than number of observations. Using 7.
d3issues
INFO:prophet:Disabling weekly seasonality. Run prophet with weekly_seasonality=True to override this.
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
INFO:prophet:n_changepoints greater than number of observations. Using 2.
angularissues
#### REQ8-4). Plot the created issues forecast
In [ ]:
def r4(reponame):
    """Forecast (Prophet, 1 year ahead) the daily number of issues created
    for one repository, and plot the forecast."""
    df = dfs[dfs['repo'] == reponame]
    # One row per created date; 'top' is the date, 'count' is issues that day.
    df_new = df.groupby('created_at')['created_at'].describe()
    df_new = pd.concat([df_new, pd.Series(df_new['top'], name='ds_orig')], axis=1)
    # FIX: removed dead code — the placeholder DataFrame and the unused
    # 'ds_new' column computed but never read.
    pdf = df_new[['top', 'ds_orig', 'count']]
    pdf.columns = ['ds', 'ds_orig', 'y']
    df_final = pdf[['ds_orig', 'y']]
    df_final.columns = ['ds', 'y']  # Prophet requires columns named ds / y
    m = Prophet()
    m.fit(df_final)
    future = m.make_future_dataframe(periods=365)  # forecast 1 year ahead
    forecast = m.predict(future)
    m.plot(forecast)
In [477]:
# Run the created-issues daily forecast for the five repositories.
_repos = dfs['repo'].unique()
for idx in range(5):
    print(_repos[idx])
    r4(_repos[idx])
angular-cliissues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
angular-google-mapsissues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
materialissues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
d3issues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
angularissues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
#### REQ8-5). Plot the closed issues forecast
In [480]:
def r5(reponame):
    """Forecast (Prophet, 1 year ahead) the daily number of issues closed
    for one repository, and plot the forecast."""
    df = dfs6[dfs6['repo'] == reponame]
    # One row per closed date; 'top' is the date, 'count' is issues that day.
    df_new = df.groupby('closed_at')['closed_at'].describe()
    df_new = pd.concat([df_new, pd.Series(df_new['top'], name='ds_orig')], axis=1)
    # FIX: removed dead code — the placeholder DataFrame and the unused
    # 'ds_new' column computed but never read.
    pdf = df_new[['top', 'ds_orig', 'count']]
    pdf.columns = ['ds', 'ds_orig', 'y']
    df_final = pdf[['ds_orig', 'y']]
    df_final.columns = ['ds', 'y']  # Prophet requires columns named ds / y
    m = Prophet()
    m.fit(df_final)
    future = m.make_future_dataframe(periods=365)  # forecast 1 year ahead
    forecast = m.predict(future)
    m.plot(forecast)
In [481]:
# Run the closed-issues daily forecast for the five repositories.
_repos = dfs['repo'].unique()
for idx in range(5):
    print(_repos[idx])
    r5(_repos[idx])
angular-cliissues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
angular-google-mapsissues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
materialissues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
d3issues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
angularissues
INFO:prophet:Disabling daily seasonality. Run prophet with daily_seasonality=True to override this.
#### REQ8-6). Plot the pulls forecast
#### REQ8-7). Plot the commits forecast
#### REQ8-8). Plot the branches forecast
#### REQ8-9). Plot the contributors forecast
#### REQ8-10). Plot the releases forecast


## REQUIREMENT 8 (TENSORFLOW/KERAS LSTM) - Use TensorFlow/Keras LSTM package to forecast the following for every repository
In [ ]:
from tensorflow import keras 
In [ ]:
def plot_and_train(tensor_Created):
    """Train a one-step LSTM on the first 80% of (time, value) rows and plot
    training history, held-out true values, and predictions.

    Parameters
    ----------
    tensor_Created : DataFrame with float columns 'time' (days since start)
        and 'value' (the series being forecast) — as built by k1/k2.
    """
    # FIX: compute the 80/20 split point once instead of re-deriving
    # int(len(...)*0.8) five times; also drop the dead bare `.shape`
    # expressions that evaluated to nothing.
    n = len(tensor_Created)
    split = int(n * 0.8)

    xtrain = tensor_Created.iloc[:split]['time'].to_numpy().reshape(split, 1, 1)
    ytrain = tensor_Created.iloc[:split]['value'].astype('float32').to_numpy()

    # Single-layer LSTM regressor: one timestep, one feature, one output.
    model = keras.Sequential()
    model.add(keras.layers.LSTM(units=128, input_shape=(1, 1)))
    model.add(keras.layers.Dropout(0.2))
    model.add(keras.layers.Dense(units=1))
    model.compile(loss='mean_squared_error', optimizer='adam')

    model.fit(xtrain, ytrain, epochs=30)

    xtest = tensor_Created.iloc[split:]['time'].to_numpy().reshape(n - split, 1, 1)
    ytest = tensor_Created.iloc[split:]['value'].astype('float32').to_numpy()

    ypred = model.predict(xtest)
    fig, axs = plt.subplots(1, 1, figsize=(20, 8))
    axs.plot([x for x in range(0, len(ytrain))], ytrain, 'g', label='history')
    axs.plot([x for x in range(len(ytrain), len(ytrain) + len(ytest))],
             ytest, marker='.', label='true')
    axs.plot([x for x in range(len(ytrain), len(ytrain) + len(ytest))],
             ypred, 'r', label='prediction')
    # FIX: labels were passed to plot() but never rendered — show the legend.
    axs.legend()
    plt.show()
#### REQ8-1). The day of the week maximum number of issues created
In [482]:
def k1(reponame):
    """Build the LSTM input frame for 'weekly max issues created' of one repo.

    Returns a DataFrame with columns:
      timestamp — date string of the week's busiest creation day,
      value     — issue count on that day,
      time      — float days elapsed since the first timestamp (the LSTM x).
    """
    df = dfs[dfs['repo'] == reponame]
    # describe() gives one row per date: 'top' is the date, 'count' the issues.
    df_new = df.groupby('created_at')['created_at'].describe()
    df_new = pd.concat([df_new, pd.Series(df_new['top'], name='ds_original')], axis=1)
    # FIX: removed dead code — a placeholder DataFrame immediately overwritten
    # and an unused `df = pd.DataFrame(tensor_Created)` copy.
    pdf = df_new[['top', 'ds_original', 'count']]
    pdf.columns = ['ds', 'ds_original', 'y']
    pdf['ds_new'] = pd.to_datetime(pdf['ds']) - pd.to_timedelta(7, unit='d')
    # For every W-MON week keep the day with the maximum created count.
    weekly_max = (pdf.reset_index()
                     .groupby([pd.Grouper(key='ds_new', freq='W-MON')])
                     .apply(lambda x: x.loc[x.y == x.y.max(), ['ds_original', 'y']]))
    # .copy() so the renames / new column below act on an independent frame.
    tensor_created = weekly_max[['ds_original', 'y']].copy()
    tensor_created.columns = ['timestamp', 'value']

    first_day = min(pd.to_datetime(tensor_created['timestamp']))
    # Elapsed whole days since the first observation, as floats for the model.
    tensor_created['time'] = [
        float((ts - first_day).days)
        for ts in pd.to_datetime(tensor_created['timestamp'])
    ]
    return tensor_created
In [483]:
# Train/plot the created-issues LSTM for the five repositories.
_repos = dfs['repo'].unique()
for idx in range(5):
    plot_and_train(k1(_repos[idx]))
Epoch 1/30
6/6 [==============================] - 2s 6ms/step - loss: 40.3379
Epoch 2/30
6/6 [==============================] - 0s 5ms/step - loss: 36.8622
Epoch 3/30
6/6 [==============================] - 0s 5ms/step - loss: 32.5123
Epoch 4/30
6/6 [==============================] - 0s 5ms/step - loss: 29.2733
Epoch 5/30
6/6 [==============================] - 0s 5ms/step - loss: 26.8783
Epoch 6/30
6/6 [==============================] - 0s 5ms/step - loss: 24.8575
Epoch 7/30
6/6 [==============================] - 0s 4ms/step - loss: 22.2255
Epoch 8/30
6/6 [==============================] - 0s 5ms/step - loss: 18.5114
Epoch 9/30
6/6 [==============================] - 0s 4ms/step - loss: 16.2103
Epoch 10/30
6/6 [==============================] - 0s 4ms/step - loss: 14.6249
Epoch 11/30
6/6 [==============================] - 0s 4ms/step - loss: 13.6604
Epoch 12/30
6/6 [==============================] - 0s 4ms/step - loss: 12.9889
Epoch 13/30
6/6 [==============================] - 0s 5ms/step - loss: 12.4530
Epoch 14/30
6/6 [==============================] - 0s 4ms/step - loss: 12.3520
Epoch 15/30
6/6 [==============================] - 0s 5ms/step - loss: 11.9230
Epoch 16/30
6/6 [==============================] - 0s 5ms/step - loss: 11.8813
Epoch 17/30
6/6 [==============================] - 0s 4ms/step - loss: 11.5843
Epoch 18/30
6/6 [==============================] - 0s 4ms/step - loss: 11.2211
Epoch 19/30
6/6 [==============================] - 0s 4ms/step - loss: 12.2544
Epoch 20/30
6/6 [==============================] - 0s 5ms/step - loss: 11.5786
Epoch 21/30
6/6 [==============================] - 0s 5ms/step - loss: 11.8827
Epoch 22/30
6/6 [==============================] - 0s 5ms/step - loss: 11.4665
Epoch 23/30
6/6 [==============================] - 0s 4ms/step - loss: 11.6020
Epoch 24/30
6/6 [==============================] - 0s 4ms/step - loss: 11.2635
Epoch 25/30
6/6 [==============================] - 0s 4ms/step - loss: 11.3555
Epoch 26/30
6/6 [==============================] - 0s 4ms/step - loss: 11.4392
Epoch 27/30
6/6 [==============================] - 0s 4ms/step - loss: 11.0738
Epoch 28/30
6/6 [==============================] - 0s 4ms/step - loss: 11.3233
Epoch 29/30
6/6 [==============================] - 0s 5ms/step - loss: 11.3702
Epoch 30/30
6/6 [==============================] - 0s 4ms/step - loss: 10.5266
Epoch 1/30
10/10 [==============================] - 2s 5ms/step - loss: 1.3680
Epoch 2/30
10/10 [==============================] - 0s 4ms/step - loss: 0.9297
Epoch 3/30
10/10 [==============================] - 0s 4ms/step - loss: 0.9457
Epoch 4/30
10/10 [==============================] - 0s 5ms/step - loss: 0.8875
Epoch 5/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8884
Epoch 6/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8891
Epoch 7/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8769
Epoch 8/30
10/10 [==============================] - 0s 4ms/step - loss: 0.7930
Epoch 9/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8864
Epoch 10/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8200
Epoch 11/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8949
Epoch 12/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8797
Epoch 13/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8599
Epoch 14/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8554
Epoch 15/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8680
Epoch 16/30
10/10 [==============================] - 0s 4ms/step - loss: 0.9468
Epoch 17/30
10/10 [==============================] - 0s 4ms/step - loss: 0.9029
Epoch 18/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8533
Epoch 19/30
10/10 [==============================] - 0s 5ms/step - loss: 0.8241
Epoch 20/30
10/10 [==============================] - 0s 5ms/step - loss: 0.8704
Epoch 21/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8317
Epoch 22/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8622
Epoch 23/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8217
Epoch 24/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8370
Epoch 25/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8806
Epoch 26/30
10/10 [==============================] - 0s 5ms/step - loss: 0.8320
Epoch 27/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8421
Epoch 28/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8322
Epoch 29/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8076
Epoch 30/30
10/10 [==============================] - 0s 4ms/step - loss: 0.8190
Epoch 1/30
8/8 [==============================] - 2s 5ms/step - loss: 7.8798
Epoch 2/30
8/8 [==============================] - 0s 4ms/step - loss: 5.6051
Epoch 3/30
8/8 [==============================] - 0s 5ms/step - loss: 3.0876
Epoch 4/30
8/8 [==============================] - 0s 4ms/step - loss: 2.0588
Epoch 5/30
8/8 [==============================] - 0s 4ms/step - loss: 1.7368
Epoch 6/30
8/8 [==============================] - 0s 4ms/step - loss: 1.4632
Epoch 7/30
8/8 [==============================] - 0s 4ms/step - loss: 1.2746
Epoch 8/30
8/8 [==============================] - 0s 4ms/step - loss: 1.2280
Epoch 9/30
8/8 [==============================] - 0s 4ms/step - loss: 1.1318
Epoch 10/30
8/8 [==============================] - 0s 4ms/step - loss: 1.0540
Epoch 11/30
8/8 [==============================] - 0s 4ms/step - loss: 1.0594
Epoch 12/30
8/8 [==============================] - 0s 4ms/step - loss: 1.0111
Epoch 13/30
8/8 [==============================] - 0s 4ms/step - loss: 1.0623
Epoch 14/30
8/8 [==============================] - 0s 4ms/step - loss: 1.0393
Epoch 15/30
8/8 [==============================] - 0s 4ms/step - loss: 0.9838
Epoch 16/30
8/8 [==============================] - 0s 4ms/step - loss: 1.0185
Epoch 17/30
8/8 [==============================] - 0s 4ms/step - loss: 0.9947
Epoch 18/30
8/8 [==============================] - 0s 4ms/step - loss: 1.0015
Epoch 19/30
8/8 [==============================] - 0s 6ms/step - loss: 1.0380
Epoch 20/30
8/8 [==============================] - 0s 4ms/step - loss: 0.9632
Epoch 21/30
8/8 [==============================] - 0s 4ms/step - loss: 0.9555
Epoch 22/30
8/8 [==============================] - 0s 5ms/step - loss: 1.0006
Epoch 23/30
8/8 [==============================] - 0s 5ms/step - loss: 1.0337
Epoch 24/30
8/8 [==============================] - 0s 5ms/step - loss: 1.0210
Epoch 25/30
8/8 [==============================] - 0s 4ms/step - loss: 0.9964
Epoch 26/30
8/8 [==============================] - 0s 4ms/step - loss: 1.0016
Epoch 27/30
8/8 [==============================] - 0s 4ms/step - loss: 0.9292
Epoch 28/30
8/8 [==============================] - 0s 4ms/step - loss: 1.0106
Epoch 29/30
8/8 [==============================] - 0s 4ms/step - loss: 0.9905
Epoch 30/30
8/8 [==============================] - 0s 4ms/step - loss: 0.9380
Epoch 1/30
11/11 [==============================] - 2s 5ms/step - loss: 4.4384
Epoch 2/30
11/11 [==============================] - 0s 4ms/step - loss: 1.6000
Epoch 3/30
11/11 [==============================] - 0s 4ms/step - loss: 1.1197
Epoch 4/30
11/11 [==============================] - 0s 4ms/step - loss: 1.0347
Epoch 5/30
11/11 [==============================] - 0s 4ms/step - loss: 1.0518
Epoch 6/30
11/11 [==============================] - 0s 4ms/step - loss: 1.0223
Epoch 7/30
11/11 [==============================] - 0s 4ms/step - loss: 1.0516
Epoch 8/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9996
Epoch 9/30
11/11 [==============================] - 0s 5ms/step - loss: 1.0120
Epoch 10/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9465
Epoch 11/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9694
Epoch 12/30
11/11 [==============================] - 0s 4ms/step - loss: 1.0329
Epoch 13/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9703
Epoch 14/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9915
Epoch 15/30
11/11 [==============================] - 0s 5ms/step - loss: 0.9819
Epoch 16/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9942
Epoch 17/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9397
Epoch 18/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9910
Epoch 19/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9665
Epoch 20/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9727
Epoch 21/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9912
Epoch 22/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9899
Epoch 23/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9562
Epoch 24/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9485
Epoch 25/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9397
Epoch 26/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9171
Epoch 27/30
11/11 [==============================] - 0s 4ms/step - loss: 1.0077
Epoch 28/30
11/11 [==============================] - 0s 5ms/step - loss: 0.9504
Epoch 29/30
11/11 [==============================] - 0s 4ms/step - loss: 0.9841
Epoch 30/30
11/11 [==============================] - 0s 4ms/step - loss: 0.8971
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
/tmp/ipykernel_91013/1009027695.py in <module>
      1 for i in range(5):
----> 2     plot_and_train(k1(dfs['repo'].unique()[i]))

/tmp/ipykernel_91013/3357273101.py in k1(reponame)
      2     df = dfs[dfs['repo'] ==reponame]
      3     df = df.groupby('created_at')['created_at']
----> 4     df_new = df.describe()
      5     dfnew1 = pd.Series(df_new['top'], name='ds_original')
      6     df_new = pd.concat([df_new, dfnew1], axis=1)

~/py37/lib/python3.7/site-packages/pandas/core/groupby/generic.py in describe(self, **kwargs)
    675     @doc(Series.describe)
    676     def describe(self, **kwargs):
--> 677         result = self.apply(lambda x: x.describe(**kwargs))
    678         if self.axis == 1:
    679             return result.T

~/py37/lib/python3.7/site-packages/pandas/core/groupby/generic.py in apply(self, func, *args, **kwargs)
    221     )
    222     def apply(self, func, *args, **kwargs):
--> 223         return super().apply(func, *args, **kwargs)
    224 
    225     @doc(_agg_template, examples=_agg_examples_doc, klass="Series")

~/py37/lib/python3.7/site-packages/pandas/core/groupby/groupby.py in apply(self, func, *args, **kwargs)
   1251         with option_context("mode.chained_assignment", None):
   1252             try:
-> 1253                 result = self._python_apply_general(f, self._selected_obj)
   1254             except TypeError:
   1255                 # gh-20949

~/py37/lib/python3.7/site-packages/pandas/core/groupby/groupby.py in _python_apply_general(self, f, data)
   1285             data after applying f
   1286         """
-> 1287         keys, values, mutated = self.grouper.apply(f, data, self.axis)
   1288 
   1289         return self._wrap_applied_output(

~/py37/lib/python3.7/site-packages/pandas/core/groupby/ops.py in apply(self, f, data, axis)
    818             # group might be modified
    819             group_axes = group.axes
--> 820             res = f(group)
    821             if not _is_indexed_like(res, group_axes, axis):
    822                 mutated = True

~/py37/lib/python3.7/site-packages/pandas/core/groupby/generic.py in <lambda>(x)
    675     @doc(Series.describe)
    676     def describe(self, **kwargs):
--> 677         result = self.apply(lambda x: x.describe(**kwargs))
    678         if self.axis == 1:
    679             return result.T

~/py37/lib/python3.7/site-packages/pandas/core/generic.py in describe(self, percentiles, include, exclude, datetime_is_numeric)
  10018             exclude=exclude,
  10019             datetime_is_numeric=datetime_is_numeric,
> 10020             percentiles=percentiles,
  10021         )
  10022 

~/py37/lib/python3.7/site-packages/pandas/core/describe.py in describe_ndframe(obj, include, exclude, datetime_is_numeric, percentiles)
     93         )
     94 
---> 95     result = describer.describe(percentiles=percentiles)
     96     return cast(FrameOrSeries, result)
     97 

~/py37/lib/python3.7/site-packages/pandas/core/describe.py in describe(self, percentiles)
    133             self.datetime_is_numeric,
    134         )
--> 135         return describe_func(self.obj, percentiles)
    136 
    137 

~/py37/lib/python3.7/site-packages/pandas/core/describe.py in describe_categorical_1d(data, percentiles_ignored)
    261     names = ["count", "unique", "top", "freq"]
    262     objcounts = data.value_counts()
--> 263     count_unique = len(objcounts[objcounts != 0])
    264     if count_unique > 0:
    265         top, freq = objcounts.index[0], objcounts.iloc[0]

KeyboardInterrupt: 
#### REQ8-2). The day of the week maximum number of issues closed
In [ ]:
def k2(reponame):
    """Build the weekly-maximum closed-issues series for one repository.

    For each calendar week (Monday-anchored), keep the single day with the
    most closed issues.

    Parameters
    ----------
    reponame : str
        Repository name matched against the global ``dfs6['repo']`` column.

    Returns
    -------
    pandas.DataFrame
        Columns: ``timestamp`` (winning day), ``value`` (closed-issue
        count) and ``time`` (float days since the first timestamp, used as
        the regression feature by ``plot_and_train``).
    """
    df = dfs6[dfs6['repo'] == reponame]
    # describe() on the per-day groupby yields 'top' (the group key, i.e.
    # the closing date) and 'count' (issues closed that day).
    stats = df.groupby('closed_at')['closed_at'].describe()
    pdf = stats[['top', 'count']].copy()  # copy() avoids SettingWithCopyWarning
    pdf.columns = ['ds_orig', 'y']
    # Shift back 7 days so the W-MON grouper assigns each day to the same
    # weekly bucket as the original implementation did.
    pdf['ds_new'] = pd.to_datetime(pdf['ds_orig']) - pd.to_timedelta(7, unit='d')
    df_weekly_max = (
        pdf.reset_index()
           .groupby(pd.Grouper(key='ds_new', freq='W-MON'))
           .apply(lambda x: x.loc[x.y == x.y.max(), ['ds_orig', 'y']])
    )
    df_final = df_weekly_max[['ds_orig', 'y']].copy()
    df_final.columns = ['timestamp', 'value']

    # Vectorized elapsed-days computation (replaces the nested list
    # comprehensions, which re-parsed every timestamp twice).
    timestamps = pd.to_datetime(df_final['timestamp'])
    first_day = timestamps.min()
    df_final['time'] = (timestamps - first_day).dt.days.astype(float)
    return df_final
    
In [486]:
# Train and plot the weekly-max closed-issue series for the first five repos.
for repo_name in dfs['repo'].unique()[:5]:
    plot_and_train(k2(repo_name))
Epoch 1/30
6/6 [==============================] - 2s 5ms/step - loss: 74.1200
Epoch 2/30
6/6 [==============================] - 0s 4ms/step - loss: 67.0349
Epoch 3/30
6/6 [==============================] - 0s 4ms/step - loss: 60.9096
Epoch 4/30
6/6 [==============================] - 0s 4ms/step - loss: 51.6273
Epoch 5/30
6/6 [==============================] - 0s 4ms/step - loss: 42.5123
Epoch 6/30
6/6 [==============================] - 0s 5ms/step - loss: 36.0137
Epoch 7/30
6/6 [==============================] - 0s 4ms/step - loss: 28.2219
Epoch 8/30
6/6 [==============================] - 0s 4ms/step - loss: 22.9476
Epoch 9/30
6/6 [==============================] - 0s 4ms/step - loss: 18.4147
Epoch 10/30
6/6 [==============================] - 0s 4ms/step - loss: 15.5896
Epoch 11/30
6/6 [==============================] - 0s 4ms/step - loss: 13.4435
Epoch 12/30
6/6 [==============================] - 0s 5ms/step - loss: 12.3899
Epoch 13/30
6/6 [==============================] - 0s 5ms/step - loss: 13.0265
Epoch 14/30
6/6 [==============================] - 0s 5ms/step - loss: 12.3130
Epoch 15/30
6/6 [==============================] - 0s 5ms/step - loss: 12.7757
Epoch 16/30
6/6 [==============================] - 0s 5ms/step - loss: 12.6435
Epoch 17/30
6/6 [==============================] - 0s 5ms/step - loss: 12.4903
Epoch 18/30
6/6 [==============================] - 0s 5ms/step - loss: 12.3464
Epoch 19/30
6/6 [==============================] - 0s 4ms/step - loss: 12.3604
Epoch 20/30
6/6 [==============================] - 0s 4ms/step - loss: 12.4186
Epoch 21/30
6/6 [==============================] - 0s 4ms/step - loss: 12.1640
Epoch 22/30
6/6 [==============================] - 0s 4ms/step - loss: 11.9021
Epoch 23/30
6/6 [==============================] - 0s 4ms/step - loss: 11.9905
Epoch 24/30
6/6 [==============================] - 0s 5ms/step - loss: 11.9494
Epoch 25/30
6/6 [==============================] - 0s 5ms/step - loss: 11.8648
Epoch 26/30
6/6 [==============================] - 0s 4ms/step - loss: 11.8564
Epoch 27/30
6/6 [==============================] - 0s 4ms/step - loss: 11.7569
Epoch 28/30
6/6 [==============================] - 0s 5ms/step - loss: 11.9083
Epoch 29/30
6/6 [==============================] - 0s 5ms/step - loss: 11.8245
Epoch 30/30
6/6 [==============================] - 0s 5ms/step - loss: 11.9770
Epoch 1/30
7/7 [==============================] - 2s 5ms/step - loss: 107.4818
Epoch 2/30
7/7 [==============================] - 0s 4ms/step - loss: 104.2688
Epoch 3/30
7/7 [==============================] - 0s 4ms/step - loss: 100.1935
Epoch 4/30
7/7 [==============================] - 0s 4ms/step - loss: 96.6306
Epoch 5/30
7/7 [==============================] - 0s 4ms/step - loss: 95.4873
Epoch 6/30
7/7 [==============================] - 0s 4ms/step - loss: 94.7735
Epoch 7/30
7/7 [==============================] - 0s 5ms/step - loss: 96.1811
Epoch 8/30
7/7 [==============================] - 0s 4ms/step - loss: 95.2301
Epoch 9/30
7/7 [==============================] - 0s 5ms/step - loss: 95.7991
Epoch 10/30
7/7 [==============================] - 0s 4ms/step - loss: 95.3945
Epoch 11/30
7/7 [==============================] - 0s 5ms/step - loss: 95.0832
Epoch 12/30
7/7 [==============================] - 0s 4ms/step - loss: 94.9185
Epoch 13/30
7/7 [==============================] - 0s 4ms/step - loss: 95.5069
Epoch 14/30
7/7 [==============================] - 0s 5ms/step - loss: 94.7043
Epoch 15/30
7/7 [==============================] - 0s 5ms/step - loss: 94.6864
Epoch 16/30
7/7 [==============================] - 0s 4ms/step - loss: 95.8631
Epoch 17/30
7/7 [==============================] - 0s 4ms/step - loss: 95.8533
Epoch 18/30
7/7 [==============================] - 0s 5ms/step - loss: 95.7566
Epoch 19/30
7/7 [==============================] - 0s 4ms/step - loss: 94.8071
Epoch 20/30
7/7 [==============================] - 0s 4ms/step - loss: 95.5525
Epoch 21/30
7/7 [==============================] - 0s 4ms/step - loss: 94.9128
Epoch 22/30
7/7 [==============================] - 0s 5ms/step - loss: 94.7027
Epoch 23/30
7/7 [==============================] - 0s 4ms/step - loss: 95.4677
Epoch 24/30
7/7 [==============================] - 0s 4ms/step - loss: 95.9445
Epoch 25/30
7/7 [==============================] - 0s 4ms/step - loss: 95.3466
Epoch 26/30
7/7 [==============================] - 0s 4ms/step - loss: 95.4067
Epoch 27/30
7/7 [==============================] - 0s 4ms/step - loss: 94.9906
Epoch 28/30
7/7 [==============================] - 0s 4ms/step - loss: 96.1412
Epoch 29/30
7/7 [==============================] - 0s 4ms/step - loss: 95.2857
Epoch 30/30
7/7 [==============================] - 0s 4ms/step - loss: 94.7068
Epoch 1/30
6/6 [==============================] - 2s 5ms/step - loss: 13.7108
Epoch 2/30
6/6 [==============================] - 0s 4ms/step - loss: 12.3670
Epoch 3/30
6/6 [==============================] - 0s 5ms/step - loss: 10.9014
Epoch 4/30
6/6 [==============================] - 0s 5ms/step - loss: 9.7541
Epoch 5/30
6/6 [==============================] - 0s 5ms/step - loss: 9.1348
Epoch 6/30
6/6 [==============================] - 0s 4ms/step - loss: 8.9016
Epoch 7/30
6/6 [==============================] - 0s 4ms/step - loss: 8.8539
Epoch 8/30
6/6 [==============================] - 0s 5ms/step - loss: 8.5987
Epoch 9/30
6/6 [==============================] - 0s 4ms/step - loss: 8.4934
Epoch 10/30
6/6 [==============================] - 0s 5ms/step - loss: 8.2805
Epoch 11/30
6/6 [==============================] - 0s 4ms/step - loss: 8.6209
Epoch 12/30
6/6 [==============================] - 0s 4ms/step - loss: 8.4046
Epoch 13/30
6/6 [==============================] - 0s 4ms/step - loss: 8.3703
Epoch 14/30
6/6 [==============================] - 0s 4ms/step - loss: 8.4253
Epoch 15/30
6/6 [==============================] - 0s 4ms/step - loss: 8.4068
Epoch 16/30
6/6 [==============================] - 0s 4ms/step - loss: 8.6456
Epoch 17/30
6/6 [==============================] - 0s 4ms/step - loss: 8.5101
Epoch 18/30
6/6 [==============================] - 0s 4ms/step - loss: 8.6263
Epoch 19/30
6/6 [==============================] - 0s 4ms/step - loss: 8.4544
Epoch 20/30
6/6 [==============================] - 0s 4ms/step - loss: 8.3588
Epoch 21/30
6/6 [==============================] - 0s 4ms/step - loss: 8.0978
Epoch 22/30
6/6 [==============================] - 0s 4ms/step - loss: 8.6571
Epoch 23/30
6/6 [==============================] - 0s 4ms/step - loss: 8.5062
Epoch 24/30
6/6 [==============================] - 0s 4ms/step - loss: 8.5077
Epoch 25/30
6/6 [==============================] - 0s 4ms/step - loss: 8.4776
Epoch 26/30
6/6 [==============================] - 0s 4ms/step - loss: 8.6114
Epoch 27/30
6/6 [==============================] - 0s 4ms/step - loss: 8.6961
Epoch 28/30
6/6 [==============================] - 0s 4ms/step - loss: 8.6640
Epoch 29/30
6/6 [==============================] - 0s 4ms/step - loss: 8.1770
Epoch 30/30
6/6 [==============================] - 0s 4ms/step - loss: 8.2620
Epoch 1/30
9/9 [==============================] - 2s 4ms/step - loss: 17.5175
Epoch 2/30
9/9 [==============================] - 0s 4ms/step - loss: 16.8972
Epoch 3/30
9/9 [==============================] - 0s 4ms/step - loss: 16.6780
Epoch 4/30
9/9 [==============================] - 0s 4ms/step - loss: 16.3357
Epoch 5/30
9/9 [==============================] - 0s 4ms/step - loss: 16.3795
Epoch 6/30
9/9 [==============================] - 0s 4ms/step - loss: 16.7081
Epoch 7/30
9/9 [==============================] - 0s 4ms/step - loss: 16.4403
Epoch 8/30
9/9 [==============================] - 0s 4ms/step - loss: 16.2461
Epoch 9/30
9/9 [==============================] - 0s 4ms/step - loss: 16.5170
Epoch 10/30
9/9 [==============================] - 0s 4ms/step - loss: 16.6691
Epoch 11/30
9/9 [==============================] - 0s 4ms/step - loss: 16.4504
Epoch 12/30
9/9 [==============================] - 0s 4ms/step - loss: 16.5629
Epoch 13/30
9/9 [==============================] - 0s 4ms/step - loss: 16.7080
Epoch 14/30
9/9 [==============================] - 0s 4ms/step - loss: 16.4483
Epoch 15/30
9/9 [==============================] - 0s 4ms/step - loss: 16.4633
Epoch 16/30
9/9 [==============================] - 0s 4ms/step - loss: 16.4959
Epoch 17/30
9/9 [==============================] - 0s 4ms/step - loss: 16.3682
Epoch 18/30
9/9 [==============================] - 0s 4ms/step - loss: 16.5014
Epoch 19/30
9/9 [==============================] - 0s 4ms/step - loss: 16.6836
Epoch 20/30
9/9 [==============================] - 0s 4ms/step - loss: 16.3210
Epoch 21/30
9/9 [==============================] - 0s 4ms/step - loss: 16.2999
Epoch 22/30
9/9 [==============================] - 0s 4ms/step - loss: 16.4754
Epoch 23/30
9/9 [==============================] - 0s 4ms/step - loss: 16.2987
Epoch 24/30
9/9 [==============================] - 0s 4ms/step - loss: 16.5386
Epoch 25/30
9/9 [==============================] - 0s 4ms/step - loss: 16.3680
Epoch 26/30
9/9 [==============================] - 0s 4ms/step - loss: 16.6911
Epoch 27/30
9/9 [==============================] - 0s 4ms/step - loss: 16.4638
Epoch 28/30
9/9 [==============================] - 0s 4ms/step - loss: 16.4680
Epoch 29/30
9/9 [==============================] - 0s 4ms/step - loss: 16.3432
Epoch 30/30
9/9 [==============================] - 0s 4ms/step - loss: 16.3844
Epoch 1/30
5/5 [==============================] - 2s 4ms/step - loss: 159.6958
Epoch 2/30
5/5 [==============================] - 0s 4ms/step - loss: 150.1990
Epoch 3/30
5/5 [==============================] - 0s 4ms/step - loss: 142.5815
Epoch 4/30
5/5 [==============================] - 0s 4ms/step - loss: 136.9518
Epoch 5/30
5/5 [==============================] - 0s 4ms/step - loss: 128.3571
Epoch 6/30
5/5 [==============================] - 0s 4ms/step - loss: 120.7656
Epoch 7/30
5/5 [==============================] - 0s 5ms/step - loss: 112.6742
Epoch 8/30
5/5 [==============================] - 0s 4ms/step - loss: 105.1713
Epoch 9/30
5/5 [==============================] - 0s 5ms/step - loss: 97.7303
Epoch 10/30
5/5 [==============================] - 0s 5ms/step - loss: 85.8433
Epoch 11/30
5/5 [==============================] - 0s 4ms/step - loss: 77.2330
Epoch 12/30
5/5 [==============================] - 0s 5ms/step - loss: 68.3689
Epoch 13/30
5/5 [==============================] - 0s 4ms/step - loss: 58.3605
Epoch 14/30
5/5 [==============================] - 0s 4ms/step - loss: 50.6090
Epoch 15/30
5/5 [==============================] - 0s 4ms/step - loss: 43.5510
Epoch 16/30
5/5 [==============================] - 0s 5ms/step - loss: 40.1509
Epoch 17/30
5/5 [==============================] - 0s 4ms/step - loss: 36.9133
Epoch 18/30
5/5 [==============================] - 0s 4ms/step - loss: 33.8098
Epoch 19/30
5/5 [==============================] - 0s 4ms/step - loss: 31.3798
Epoch 20/30
5/5 [==============================] - 0s 4ms/step - loss: 30.7913
Epoch 21/30
5/5 [==============================] - 0s 4ms/step - loss: 29.3891
Epoch 22/30
5/5 [==============================] - 0s 4ms/step - loss: 28.7196
Epoch 23/30
5/5 [==============================] - 0s 4ms/step - loss: 28.4255
Epoch 24/30
5/5 [==============================] - 0s 4ms/step - loss: 28.9692
Epoch 25/30
5/5 [==============================] - 0s 4ms/step - loss: 29.4308
Epoch 26/30
5/5 [==============================] - 0s 5ms/step - loss: 27.8648
Epoch 27/30
5/5 [==============================] - 0s 5ms/step - loss: 29.4379
Epoch 28/30
5/5 [==============================] - 0s 5ms/step - loss: 29.1608
Epoch 29/30
5/5 [==============================] - 0s 5ms/step - loss: 28.6897
Epoch 30/30
5/5 [==============================] - 0s 5ms/step - loss: 27.9169
#### REQ8-3). The month of the year that has the maximum number of issues closed
In [ ]:
def k3(reponame):
    """Build the yearly-maximum monthly closed-issues series for one repo.

    Closing dates are first collapsed to the first day of their month, then
    for each year the single month with the most closed issues is kept.

    Parameters
    ----------
    reponame : str
        Repository name matched against the global ``dfs6['repo']`` column.

    Returns
    -------
    pandas.DataFrame
        Columns: ``timestamp`` (winning month as 'YYYY-MM-01'), ``value``
        (closed-issue count) and ``time`` (approximate months since the
        first timestamp, computed as days / 30).
    """
    # copy() so the column rewrite below does not mutate a view of dfs6.
    df = dfs6[dfs6['repo'] == reponame].copy()
    # Collapse 'YYYY-MM-DD' to 'YYYY-MM-01' so the groupby buckets by month.
    df['closed_at'] = df['closed_at'].map(lambda x: '-'.join(x.split('-')[:-1] + ['01']))
    # describe() on the per-month groupby yields 'top' (the month key) and
    # 'count' (issues closed that month).
    stats = df.groupby('closed_at')['closed_at'].describe()
    pdf = stats[['top', 'count']].copy()
    pdf.columns = ['ds_orig', 'y']
    pdf['ds_new'] = pd.to_datetime(pdf['ds_orig'])
    df_yearly_max = (
        pdf.reset_index()
           .groupby(pd.Grouper(key='ds_new', freq='Y'))
           .apply(lambda x: x.loc[x.y == x.y.max(), ['ds_orig', 'y']])
    )
    print(df_yearly_max)

    df_final = df_yearly_max[['ds_orig', 'y']].copy()
    df_final.columns = ['timestamp', 'value']

    # Vectorized "months elapsed" feature; 30-day months approximate the
    # original float(x.days / 30) computation exactly.
    timestamps = pd.to_datetime(df_final['timestamp'])
    first_day = timestamps.min()
    df_final['time'] = ((timestamps - first_day).dt.days / 30).astype(float)
    return df_final
In [488]:
# Train and plot the yearly-max monthly closed-issue series for the first
# five repos.
for repo_name in dfs['repo'].unique()[:5]:
    plot_and_train(k3(repo_name))
                  ds_orig    y
ds_new                        
2018-12-31 1   2018-11-01  154
2019-12-31 8   2019-06-01  148
2020-12-31 19  2020-05-01  135
2021-12-31 31  2021-05-01  204
Epoch 1/30
1/1 [==============================] - 2s 2s/step - loss: 21186.3379
Epoch 2/30
1/1 [==============================] - 0s 8ms/step - loss: 21174.1211
Epoch 3/30
1/1 [==============================] - 0s 6ms/step - loss: 21181.0234
Epoch 4/30
1/1 [==============================] - 0s 6ms/step - loss: 21138.5117
Epoch 5/30
1/1 [==============================] - 0s 7ms/step - loss: 21147.5215
Epoch 6/30
1/1 [==============================] - 0s 6ms/step - loss: 21103.4023
Epoch 7/30
1/1 [==============================] - 0s 6ms/step - loss: 21107.5410
Epoch 8/30
1/1 [==============================] - 0s 6ms/step - loss: 21125.9004
Epoch 9/30
1/1 [==============================] - 0s 7ms/step - loss: 21088.3848
Epoch 10/30
1/1 [==============================] - 0s 8ms/step - loss: 21069.5527
Epoch 11/30
1/1 [==============================] - 0s 6ms/step - loss: 21081.3926
Epoch 12/30
1/1 [==============================] - 0s 6ms/step - loss: 21035.0625
Epoch 13/30
1/1 [==============================] - 0s 5ms/step - loss: 21049.3613
Epoch 14/30
1/1 [==============================] - 0s 5ms/step - loss: 21029.3027
Epoch 15/30
1/1 [==============================] - 0s 5ms/step - loss: 20998.2598
Epoch 16/30
1/1 [==============================] - 0s 7ms/step - loss: 21021.0469
Epoch 17/30
1/1 [==============================] - 0s 5ms/step - loss: 21010.8301
Epoch 18/30
1/1 [==============================] - 0s 5ms/step - loss: 20995.4355
Epoch 19/30
1/1 [==============================] - 0s 6ms/step - loss: 20974.4629
Epoch 20/30
1/1 [==============================] - 0s 5ms/step - loss: 20975.2051
Epoch 21/30
1/1 [==============================] - 0s 5ms/step - loss: 20954.5176
Epoch 22/30
1/1 [==============================] - 0s 5ms/step - loss: 20931.6250
Epoch 23/30
1/1 [==============================] - 0s 5ms/step - loss: 20930.9453
Epoch 24/30
1/1 [==============================] - 0s 6ms/step - loss: 20861.2559
Epoch 25/30
1/1 [==============================] - 0s 6ms/step - loss: 20874.9043
Epoch 26/30
1/1 [==============================] - 0s 6ms/step - loss: 20902.3516
Epoch 27/30
1/1 [==============================] - 0s 8ms/step - loss: 20857.5879
Epoch 28/30
1/1 [==============================] - 0s 6ms/step - loss: 20857.0449
Epoch 29/30
1/1 [==============================] - 0s 7ms/step - loss: 20857.7520
Epoch 30/30
1/1 [==============================] - 0s 8ms/step - loss: 20815.6484
                  ds_orig    y
ds_new                        
2016-12-31 3   2016-09-01   21
2017-12-31 15  2017-09-01   28
2018-12-31 29  2018-11-01  219
2019-12-31 31  2019-01-01   37
2020-12-31 48  2020-07-01   34
2021-12-31 56  2021-06-01   27
Epoch 1/30
1/1 [==============================] - 3s 3s/step - loss: 12688.4199
Epoch 2/30
1/1 [==============================] - 0s 6ms/step - loss: 12659.1436
Epoch 3/30
1/1 [==============================] - 0s 5ms/step - loss: 12632.9893
Epoch 4/30
1/1 [==============================] - 0s 6ms/step - loss: 12621.5967
Epoch 5/30
1/1 [==============================] - 0s 5ms/step - loss: 12657.7139
Epoch 6/30
1/1 [==============================] - 0s 5ms/step - loss: 12619.1982
Epoch 7/30
1/1 [==============================] - 0s 7ms/step - loss: 12576.9375
Epoch 8/30
1/1 [==============================] - 0s 5ms/step - loss: 12568.8975
Epoch 9/30
1/1 [==============================] - 0s 5ms/step - loss: 12542.2715
Epoch 10/30
1/1 [==============================] - 0s 5ms/step - loss: 12543.8242
Epoch 11/30
1/1 [==============================] - 0s 5ms/step - loss: 12528.3682
Epoch 12/30
1/1 [==============================] - 0s 6ms/step - loss: 12524.0352
Epoch 13/30
1/1 [==============================] - 0s 5ms/step - loss: 12486.1396
Epoch 14/30
1/1 [==============================] - 0s 5ms/step - loss: 12475.4902
Epoch 15/30
1/1 [==============================] - 0s 6ms/step - loss: 12501.8203
Epoch 16/30
1/1 [==============================] - 0s 6ms/step - loss: 12464.9521
Epoch 17/30
1/1 [==============================] - 0s 5ms/step - loss: 12494.0283
Epoch 18/30
1/1 [==============================] - 0s 6ms/step - loss: 12452.4600
Epoch 19/30
1/1 [==============================] - 0s 5ms/step - loss: 12416.0205
Epoch 20/30
1/1 [==============================] - 0s 5ms/step - loss: 12418.7666
Epoch 21/30
1/1 [==============================] - 0s 6ms/step - loss: 12430.4590
Epoch 22/30
1/1 [==============================] - 0s 6ms/step - loss: 12463.5918
Epoch 23/30
1/1 [==============================] - 0s 6ms/step - loss: 12358.2354
Epoch 24/30
1/1 [==============================] - 0s 5ms/step - loss: 12355.5957
Epoch 25/30
1/1 [==============================] - 0s 5ms/step - loss: 12363.9561
Epoch 26/30
1/1 [==============================] - 0s 6ms/step - loss: 12324.6846
Epoch 27/30
1/1 [==============================] - 0s 5ms/step - loss: 12357.5088
Epoch 28/30
1/1 [==============================] - 0s 6ms/step - loss: 12324.7207
Epoch 29/30
1/1 [==============================] - 0s 5ms/step - loss: 12320.8730
Epoch 30/30
1/1 [==============================] - 0s 5ms/step - loss: 12324.1348
                  ds_orig   y
ds_new                       
2017-12-31 4   2017-06-01  40
2018-12-31 13  2018-03-01  43
2019-12-31 24  2019-02-01  46
2020-12-31 44  2020-11-01  44
2021-12-31 50  2021-06-01  10
Epoch 1/30
1/1 [==============================] - 2s 2s/step - loss: 1886.1960
Epoch 2/30
1/1 [==============================] - 0s 7ms/step - loss: 1882.0845
Epoch 3/30
1/1 [==============================] - 0s 7ms/step - loss: 1880.9705
Epoch 4/30
1/1 [==============================] - 0s 6ms/step - loss: 1870.4672
Epoch 5/30
1/1 [==============================] - 0s 5ms/step - loss: 1863.0278
Epoch 6/30
1/1 [==============================] - 0s 6ms/step - loss: 1857.0588
Epoch 7/30
1/1 [==============================] - 0s 5ms/step - loss: 1857.8097
Epoch 8/30
1/1 [==============================] - 0s 5ms/step - loss: 1847.3203
Epoch 9/30
1/1 [==============================] - 0s 5ms/step - loss: 1833.3953
Epoch 10/30
1/1 [==============================] - 0s 9ms/step - loss: 1845.6047
Epoch 11/30
1/1 [==============================] - 0s 6ms/step - loss: 1826.2349
Epoch 12/30
1/1 [==============================] - 0s 6ms/step - loss: 1838.9135
Epoch 13/30
1/1 [==============================] - 0s 6ms/step - loss: 1828.5477
Epoch 14/30
1/1 [==============================] - 0s 6ms/step - loss: 1824.9712
Epoch 15/30
1/1 [==============================] - 0s 7ms/step - loss: 1817.4939
Epoch 16/30
1/1 [==============================] - 0s 5ms/step - loss: 1801.7969
Epoch 17/30
1/1 [==============================] - 0s 5ms/step - loss: 1800.8901
Epoch 18/30
1/1 [==============================] - 0s 6ms/step - loss: 1785.0795
Epoch 19/30
1/1 [==============================] - 0s 7ms/step - loss: 1779.9589
Epoch 20/30
1/1 [==============================] - 0s 6ms/step - loss: 1797.8136
Epoch 21/30
1/1 [==============================] - 0s 7ms/step - loss: 1777.9838
Epoch 22/30
1/1 [==============================] - 0s 7ms/step - loss: 1769.2893
Epoch 23/30
1/1 [==============================] - 0s 6ms/step - loss: 1767.1208
Epoch 24/30
1/1 [==============================] - 0s 6ms/step - loss: 1765.4403
Epoch 25/30
1/1 [==============================] - 0s 6ms/step - loss: 1753.4735
Epoch 26/30
1/1 [==============================] - 0s 6ms/step - loss: 1748.7644
Epoch 27/30
1/1 [==============================] - 0s 6ms/step - loss: 1749.6602
Epoch 28/30
1/1 [==============================] - 0s 6ms/step - loss: 1740.8008
Epoch 29/30
1/1 [==============================] - 0s 6ms/step - loss: 1728.5815
Epoch 30/30
1/1 [==============================] - 0s 7ms/step - loss: 1716.7257
                  ds_orig   y
ds_new                       
2014-12-31 4   2014-10-01  29
           6   2014-12-01  29
2015-12-31 16  2015-10-01  76
2016-12-31 21  2016-03-01  52
2017-12-31 31  2017-01-01  42
2018-12-31 45  2018-03-01  15
2019-12-31 47  2019-08-01  12
2020-12-31 56  2020-05-01   7
2021-12-31 64  2021-01-01   7
           69  2021-06-01   7
Epoch 1/30
1/1 [==============================] - 2s 2s/step - loss: 1530.9866
Epoch 2/30
1/1 [==============================] - 0s 6ms/step - loss: 1528.2532
Epoch 3/30
1/1 [==============================] - 0s 6ms/step - loss: 1532.1580
Epoch 4/30
1/1 [==============================] - 0s 6ms/step - loss: 1521.5898
Epoch 5/30
1/1 [==============================] - 0s 5ms/step - loss: 1518.0095
Epoch 6/30
1/1 [==============================] - 0s 6ms/step - loss: 1519.9308
Epoch 7/30
1/1 [==============================] - 0s 6ms/step - loss: 1513.2712
Epoch 8/30
1/1 [==============================] - 0s 6ms/step - loss: 1509.9646
Epoch 9/30
1/1 [==============================] - 0s 5ms/step - loss: 1501.1609
Epoch 10/30
1/1 [==============================] - 0s 5ms/step - loss: 1505.7505
Epoch 11/30
1/1 [==============================] - 0s 5ms/step - loss: 1496.9011
Epoch 12/30
1/1 [==============================] - 0s 4ms/step - loss: 1486.4440
Epoch 13/30
1/1 [==============================] - 0s 4ms/step - loss: 1487.8616
Epoch 14/30
1/1 [==============================] - 0s 4ms/step - loss: 1482.9243
Epoch 15/30
1/1 [==============================] - 0s 4ms/step - loss: 1481.1794
Epoch 16/30
1/1 [==============================] - 0s 6ms/step - loss: 1478.1934
Epoch 17/30
1/1 [==============================] - 0s 5ms/step - loss: 1475.4177
Epoch 18/30
1/1 [==============================] - 0s 5ms/step - loss: 1470.8655
Epoch 19/30
1/1 [==============================] - 0s 5ms/step - loss: 1464.6187
Epoch 20/30
1/1 [==============================] - 0s 5ms/step - loss: 1467.2003
Epoch 21/30
1/1 [==============================] - 0s 5ms/step - loss: 1458.7715
Epoch 22/30
1/1 [==============================] - 0s 5ms/step - loss: 1455.8928
Epoch 23/30
1/1 [==============================] - 0s 4ms/step - loss: 1449.6932
Epoch 24/30
1/1 [==============================] - 0s 5ms/step - loss: 1453.3782
Epoch 25/30
1/1 [==============================] - 0s 5ms/step - loss: 1439.6702
Epoch 26/30
1/1 [==============================] - 0s 4ms/step - loss: 1439.9167
Epoch 27/30
1/1 [==============================] - 0s 5ms/step - loss: 1432.2539
Epoch 28/30
1/1 [==============================] - 0s 4ms/step - loss: 1435.2278
Epoch 29/30
1/1 [==============================] - 0s 5ms/step - loss: 1433.1921
Epoch 30/30
1/1 [==============================] - 0s 5ms/step - loss: 1430.3213
                  ds_orig    y
ds_new                        
2018-12-31 1   2018-11-01  178
2019-12-31 13  2019-11-01  193
2020-12-31 16  2020-02-01  299
2021-12-31 31  2021-05-01  291
Epoch 1/30
1/1 [==============================] - 2s 2s/step - loss: 52750.5820
Epoch 2/30
1/1 [==============================] - 0s 7ms/step - loss: 52790.8711
Epoch 3/30
1/1 [==============================] - 0s 6ms/step - loss: 52735.7227
Epoch 4/30
1/1 [==============================] - 0s 6ms/step - loss: 52709.0469
Epoch 5/30
1/1 [==============================] - 0s 6ms/step - loss: 52682.9219
Epoch 6/30
1/1 [==============================] - 0s 6ms/step - loss: 52695.6992
Epoch 7/30
1/1 [==============================] - 0s 6ms/step - loss: 52662.8242
Epoch 8/30
1/1 [==============================] - 0s 6ms/step - loss: 52648.8320
Epoch 9/30
1/1 [==============================] - 0s 6ms/step - loss: 52629.8008
Epoch 10/30
1/1 [==============================] - 0s 7ms/step - loss: 52549.2617
Epoch 11/30
1/1 [==============================] - 0s 7ms/step - loss: 52590.3867
Epoch 12/30
1/1 [==============================] - 0s 6ms/step - loss: 52537.3906
Epoch 13/30
1/1 [==============================] - 0s 6ms/step - loss: 52545.3633
Epoch 14/30
1/1 [==============================] - 0s 6ms/step - loss: 52556.2500
Epoch 15/30
1/1 [==============================] - 0s 6ms/step - loss: 52439.8477
Epoch 16/30
1/1 [==============================] - 0s 6ms/step - loss: 52448.4375
Epoch 17/30
1/1 [==============================] - 0s 5ms/step - loss: 52479.0625
Epoch 18/30
1/1 [==============================] - 0s 6ms/step - loss: 52458.5625
Epoch 19/30
1/1 [==============================] - 0s 7ms/step - loss: 52431.2227
Epoch 20/30
1/1 [==============================] - 0s 7ms/step - loss: 52320.3477
Epoch 21/30
1/1 [==============================] - 0s 6ms/step - loss: 52363.5508
Epoch 22/30
1/1 [==============================] - 0s 5ms/step - loss: 52353.5430
Epoch 23/30
1/1 [==============================] - 0s 5ms/step - loss: 52267.8125
Epoch 24/30
1/1 [==============================] - 0s 5ms/step - loss: 52336.2852
Epoch 25/30
1/1 [==============================] - 0s 5ms/step - loss: 52317.0469
Epoch 26/30
1/1 [==============================] - 0s 5ms/step - loss: 52251.5000
Epoch 27/30
1/1 [==============================] - 0s 4ms/step - loss: 52248.7070
Epoch 28/30
1/1 [==============================] - 0s 5ms/step - loss: 52268.5508
Epoch 29/30
1/1 [==============================] - 0s 5ms/step - loss: 52212.7656
Epoch 30/30
1/1 [==============================] - 0s 6ms/step - loss: 52164.3086
#### REQ8-4). Plot the created issues forecast
In [ ]:
def k4(reponame):
    """Build a per-day created-issue count table for one repository.

    Parameters
    ----------
    reponame : str
        Repository name used to filter the module-level ``dfs`` DataFrame.

    Returns
    -------
    pandas.DataFrame
        Columns ``timestamp`` (the original ``created_at`` value),
        ``value`` (how many issues were created at that timestamp) and
        ``time`` (float days elapsed since the earliest timestamp) --
        the shape expected by ``plot_and_train``.
    """
    repo_df = dfs[dfs['repo'] == reponame]

    # describe() on a string-typed groupby yields 'top' (the date itself,
    # since each group holds a single distinct value) and 'count'
    # (occurrences of that date == issues created that day).
    grouped = repo_df.groupby('created_at')['created_at'].describe()
    grouped = pd.concat(
        [grouped, pd.Series(grouped['top'], name='ds_orig')], axis=1)

    # .copy() so the column assignments below do not trigger
    # SettingWithCopyWarning on a view of `grouped`.
    df_final = grouped[['ds_orig', 'count']].copy()
    df_final.columns = ['timestamp', 'value']

    # Model input: float days elapsed since the first observation.
    timestamps = pd.to_datetime(df_final['timestamp'])
    df_final['time'] = (timestamps - timestamps.min()).dt.days.astype(float)
    return df_final
In [490]:
# Train and plot a created-issues forecast for the first five repositories.
for repo_name in dfs['repo'].unique()[:5]:
    plot_and_train(k4(repo_name))
Epoch 1/30
26/26 [==============================] - 2s 5ms/step - loss: 12.4158
Epoch 2/30
26/26 [==============================] - 0s 4ms/step - loss: 8.8909
Epoch 3/30
26/26 [==============================] - 0s 4ms/step - loss: 8.4112
Epoch 4/30
26/26 [==============================] - 0s 4ms/step - loss: 8.3992
Epoch 5/30
26/26 [==============================] - 0s 4ms/step - loss: 8.3049
Epoch 6/30
26/26 [==============================] - 0s 4ms/step - loss: 8.3289
Epoch 7/30
26/26 [==============================] - 0s 4ms/step - loss: 8.2537
Epoch 8/30
26/26 [==============================] - 0s 4ms/step - loss: 8.1962
Epoch 9/30
26/26 [==============================] - 0s 4ms/step - loss: 8.1856
Epoch 10/30
26/26 [==============================] - 0s 4ms/step - loss: 8.1767
Epoch 11/30
26/26 [==============================] - 0s 4ms/step - loss: 8.1561
Epoch 12/30
26/26 [==============================] - 0s 4ms/step - loss: 8.0360
Epoch 13/30
26/26 [==============================] - 0s 4ms/step - loss: 7.9757
Epoch 14/30
26/26 [==============================] - 0s 4ms/step - loss: 7.9456
Epoch 15/30
26/26 [==============================] - 0s 4ms/step - loss: 7.9509
Epoch 16/30
26/26 [==============================] - 0s 4ms/step - loss: 7.9493
Epoch 17/30
26/26 [==============================] - 0s 4ms/step - loss: 7.9753
Epoch 18/30
26/26 [==============================] - 0s 4ms/step - loss: 7.8558
Epoch 19/30
26/26 [==============================] - 0s 4ms/step - loss: 7.7772
Epoch 20/30
26/26 [==============================] - 0s 4ms/step - loss: 7.8784
Epoch 21/30
26/26 [==============================] - 0s 4ms/step - loss: 8.0094
Epoch 22/30
26/26 [==============================] - 0s 4ms/step - loss: 7.8180
Epoch 23/30
26/26 [==============================] - 0s 4ms/step - loss: 7.8393
Epoch 24/30
26/26 [==============================] - 0s 4ms/step - loss: 7.9391
Epoch 25/30
26/26 [==============================] - 0s 5ms/step - loss: 7.7241
Epoch 26/30
26/26 [==============================] - 0s 4ms/step - loss: 7.7861
Epoch 27/30
26/26 [==============================] - 0s 4ms/step - loss: 7.7549
Epoch 28/30
26/26 [==============================] - 0s 4ms/step - loss: 7.6759
Epoch 29/30
26/26 [==============================] - 0s 4ms/step - loss: 7.8657
Epoch 30/30
26/26 [==============================] - 0s 4ms/step - loss: 7.9070
Epoch 1/30
18/18 [==============================] - 2s 5ms/step - loss: 0.9510
Epoch 2/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6510
Epoch 3/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6786
Epoch 4/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6597
Epoch 5/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6554
Epoch 6/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6520
Epoch 7/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6168
Epoch 8/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6293
Epoch 9/30
18/18 [==============================] - 0s 5ms/step - loss: 0.6315
Epoch 10/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6391
Epoch 11/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6073
Epoch 12/30
18/18 [==============================] - 0s 5ms/step - loss: 0.6244
Epoch 13/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6086
Epoch 14/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6372
Epoch 15/30
18/18 [==============================] - 0s 5ms/step - loss: 0.6187
Epoch 16/30
18/18 [==============================] - 0s 5ms/step - loss: 0.6133
Epoch 17/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6369
Epoch 18/30
18/18 [==============================] - 0s 5ms/step - loss: 0.6186
Epoch 19/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6329
Epoch 20/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6049
Epoch 21/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6147
Epoch 22/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6169
Epoch 23/30
18/18 [==============================] - 0s 5ms/step - loss: 0.6086
Epoch 24/30
18/18 [==============================] - 0s 4ms/step - loss: 0.5950
Epoch 25/30
18/18 [==============================] - 0s 4ms/step - loss: 0.5994
Epoch 26/30
18/18 [==============================] - 0s 4ms/step - loss: 0.5905
Epoch 27/30
18/18 [==============================] - 0s 4ms/step - loss: 0.5917
Epoch 28/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6012
Epoch 29/30
18/18 [==============================] - 0s 4ms/step - loss: 0.5935
Epoch 30/30
18/18 [==============================] - 0s 4ms/step - loss: 0.6160
Epoch 1/30
15/15 [==============================] - 2s 5ms/step - loss: 4.1128
Epoch 2/30
15/15 [==============================] - 0s 4ms/step - loss: 1.7058
Epoch 3/30
15/15 [==============================] - 0s 4ms/step - loss: 1.3660
Epoch 4/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2856
Epoch 5/30
15/15 [==============================] - 0s 4ms/step - loss: 1.3332
Epoch 6/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2633
Epoch 7/30
15/15 [==============================] - 0s 5ms/step - loss: 1.3141
Epoch 8/30
15/15 [==============================] - 0s 4ms/step - loss: 1.3183
Epoch 9/30
15/15 [==============================] - 0s 5ms/step - loss: 1.3091
Epoch 10/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2423
Epoch 11/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2628
Epoch 12/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2983
Epoch 13/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2486
Epoch 14/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2171
Epoch 15/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2525
Epoch 16/30
15/15 [==============================] - 0s 5ms/step - loss: 1.3002
Epoch 17/30
15/15 [==============================] - 0s 5ms/step - loss: 1.2420
Epoch 18/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2880
Epoch 19/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2127
Epoch 20/30
15/15 [==============================] - 0s 5ms/step - loss: 1.2392
Epoch 21/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2492
Epoch 22/30
15/15 [==============================] - 0s 5ms/step - loss: 1.2321
Epoch 23/30
15/15 [==============================] - 0s 5ms/step - loss: 1.2221
Epoch 24/30
15/15 [==============================] - 0s 5ms/step - loss: 1.2793
Epoch 25/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2132
Epoch 26/30
15/15 [==============================] - 0s 5ms/step - loss: 1.2393
Epoch 27/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2196
Epoch 28/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2230
Epoch 29/30
15/15 [==============================] - 0s 4ms/step - loss: 1.2142
Epoch 30/30
15/15 [==============================] - 0s 4ms/step - loss: 1.1920
Epoch 1/30
18/18 [==============================] - 2s 5ms/step - loss: 4.1713
Epoch 2/30
18/18 [==============================] - 0s 5ms/step - loss: 1.2315
Epoch 3/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8706
Epoch 4/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8789
Epoch 5/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8460
Epoch 6/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8351
Epoch 7/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8347
Epoch 8/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8317
Epoch 9/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8502
Epoch 10/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8440
Epoch 11/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8504
Epoch 12/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8340
Epoch 13/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8609
Epoch 14/30
18/18 [==============================] - 0s 5ms/step - loss: 0.8159
Epoch 15/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8254
Epoch 16/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8131
Epoch 17/30
18/18 [==============================] - 0s 4ms/step - loss: 0.7905
Epoch 18/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8328
Epoch 19/30
18/18 [==============================] - 0s 5ms/step - loss: 0.8651
Epoch 20/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8556
Epoch 21/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8532
Epoch 22/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8065
Epoch 23/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8305
Epoch 24/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8091
Epoch 25/30
18/18 [==============================] - 0s 4ms/step - loss: 0.7912
Epoch 26/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8329
Epoch 27/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8195
Epoch 28/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8209
Epoch 29/30
18/18 [==============================] - 0s 4ms/step - loss: 0.7866
Epoch 30/30
18/18 [==============================] - 0s 4ms/step - loss: 0.8181
Epoch 1/30
28/28 [==============================] - 2s 5ms/step - loss: 50.6463
Epoch 2/30
28/28 [==============================] - 0s 4ms/step - loss: 25.5139
Epoch 3/30
28/28 [==============================] - 0s 4ms/step - loss: 19.1216
Epoch 4/30
28/28 [==============================] - 0s 4ms/step - loss: 18.2795
Epoch 5/30
28/28 [==============================] - 0s 4ms/step - loss: 18.4022
Epoch 6/30
28/28 [==============================] - 0s 5ms/step - loss: 18.1270
Epoch 7/30
28/28 [==============================] - 0s 4ms/step - loss: 18.4402
Epoch 8/30
28/28 [==============================] - 0s 4ms/step - loss: 18.2399
Epoch 9/30
28/28 [==============================] - 0s 4ms/step - loss: 17.7879
Epoch 10/30
28/28 [==============================] - 0s 5ms/step - loss: 17.8086
Epoch 11/30
28/28 [==============================] - 0s 4ms/step - loss: 17.5698
Epoch 12/30
28/28 [==============================] - 0s 4ms/step - loss: 17.8778
Epoch 13/30
28/28 [==============================] - 0s 4ms/step - loss: 17.6844
Epoch 14/30
28/28 [==============================] - 0s 5ms/step - loss: 17.5430
Epoch 15/30
28/28 [==============================] - 0s 4ms/step - loss: 17.4135
Epoch 16/30
28/28 [==============================] - 0s 4ms/step - loss: 17.6009
Epoch 17/30
28/28 [==============================] - 0s 4ms/step - loss: 17.4755
Epoch 18/30
28/28 [==============================] - 0s 4ms/step - loss: 17.3652
Epoch 19/30
28/28 [==============================] - 0s 4ms/step - loss: 17.5284
Epoch 20/30
28/28 [==============================] - 0s 4ms/step - loss: 17.3431
Epoch 21/30
28/28 [==============================] - 0s 4ms/step - loss: 17.2852
Epoch 22/30
28/28 [==============================] - 0s 4ms/step - loss: 17.3109
Epoch 23/30
28/28 [==============================] - 0s 4ms/step - loss: 17.1911
Epoch 24/30
28/28 [==============================] - 0s 4ms/step - loss: 17.1261
Epoch 25/30
28/28 [==============================] - 0s 4ms/step - loss: 17.0597
Epoch 26/30
28/28 [==============================] - 0s 4ms/step - loss: 17.3840
Epoch 27/30
28/28 [==============================] - 0s 4ms/step - loss: 16.9517
Epoch 28/30
28/28 [==============================] - 0s 4ms/step - loss: 17.2919
Epoch 29/30
28/28 [==============================] - 0s 4ms/step - loss: 17.0184
Epoch 30/30
28/28 [==============================] - 0s 4ms/step - loss: 17.2160
#### REQ8-5). Plot the closed issues forecast
In [ ]:
def k5(reponame):
    """Build a per-day closed-issue count table for one repository.

    Parameters
    ----------
    reponame : str
        Repository name used to filter the module-level ``dfs6`` DataFrame.

    Returns
    -------
    pandas.DataFrame
        Columns ``timestamp`` (the original ``closed_at`` value),
        ``value`` (how many issues were closed at that timestamp) and
        ``time`` (float days elapsed since the earliest timestamp) --
        the shape expected by ``plot_and_train``.
    """
    repo_df = dfs6[dfs6['repo'] == reponame]

    # describe() on a string-typed groupby yields 'top' (the date itself,
    # since each group holds a single distinct value) and 'count'
    # (occurrences of that date == issues closed that day).
    grouped = repo_df.groupby('closed_at')['closed_at'].describe()
    grouped = pd.concat(
        [grouped, pd.Series(grouped['top'], name='ds_orig')], axis=1)

    # .copy() so the column assignments below do not trigger
    # SettingWithCopyWarning on a view of `grouped`.
    df_final = grouped[['ds_orig', 'count']].copy()
    df_final.columns = ['timestamp', 'value']

    # Model input: float days elapsed since the first observation.
    timestamps = pd.to_datetime(df_final['timestamp'])
    df_final['time'] = (timestamps - timestamps.min()).dt.days.astype(float)
    return df_final
In [487]:
# Train and plot a closed-issues forecast for the first five repositories.
# NOTE(review): repo names come from `dfs` while k5 filters `dfs6` --
# presumably both frames cover the same repositories; confirm.
for repo_name in dfs['repo'].unique()[:5]:
    plot_and_train(k5(repo_name))
Epoch 1/30
24/24 [==============================] - 2s 4ms/step - loss: 17.2280
Epoch 2/30
24/24 [==============================] - 0s 4ms/step - loss: 10.0193
Epoch 3/30
24/24 [==============================] - 0s 4ms/step - loss: 8.9911
Epoch 4/30
24/24 [==============================] - 0s 4ms/step - loss: 8.9415
Epoch 5/30
24/24 [==============================] - 0s 4ms/step - loss: 9.1218
Epoch 6/30
24/24 [==============================] - 0s 4ms/step - loss: 8.8811
Epoch 7/30
24/24 [==============================] - 0s 4ms/step - loss: 8.9682
Epoch 8/30
24/24 [==============================] - 0s 4ms/step - loss: 8.8696
Epoch 9/30
24/24 [==============================] - 0s 4ms/step - loss: 8.9168
Epoch 10/30
24/24 [==============================] - 0s 4ms/step - loss: 9.0876
Epoch 11/30
24/24 [==============================] - 0s 4ms/step - loss: 8.7843
Epoch 12/30
24/24 [==============================] - 0s 4ms/step - loss: 8.9226
Epoch 13/30
24/24 [==============================] - 0s 5ms/step - loss: 8.7900
Epoch 14/30
24/24 [==============================] - 0s 4ms/step - loss: 8.9385
Epoch 15/30
24/24 [==============================] - 0s 5ms/step - loss: 8.9081
Epoch 16/30
24/24 [==============================] - 0s 4ms/step - loss: 8.9308
Epoch 17/30
24/24 [==============================] - 0s 4ms/step - loss: 8.9300
Epoch 18/30
24/24 [==============================] - 0s 4ms/step - loss: 8.8401
Epoch 19/30
24/24 [==============================] - 0s 4ms/step - loss: 8.7916
Epoch 20/30
24/24 [==============================] - 0s 4ms/step - loss: 8.8162
Epoch 21/30
24/24 [==============================] - 0s 5ms/step - loss: 8.8432
Epoch 22/30
24/24 [==============================] - 0s 4ms/step - loss: 8.8841
Epoch 23/30
24/24 [==============================] - 0s 5ms/step - loss: 8.8078
Epoch 24/30
24/24 [==============================] - 0s 5ms/step - loss: 8.6432
Epoch 25/30
24/24 [==============================] - 0s 4ms/step - loss: 8.8110
Epoch 26/30
24/24 [==============================] - 0s 5ms/step - loss: 8.7156
Epoch 27/30
24/24 [==============================] - 0s 4ms/step - loss: 8.8725
Epoch 28/30
24/24 [==============================] - 0s 4ms/step - loss: 8.8172
Epoch 29/30
24/24 [==============================] - 0s 4ms/step - loss: 8.8447
Epoch 30/30
24/24 [==============================] - 0s 4ms/step - loss: 8.8299
Epoch 1/30
11/11 [==============================] - 2s 5ms/step - loss: 71.9499
Epoch 2/30
11/11 [==============================] - 0s 5ms/step - loss: 69.3065
Epoch 3/30
11/11 [==============================] - 0s 5ms/step - loss: 66.4799
Epoch 4/30
11/11 [==============================] - 0s 4ms/step - loss: 66.4867
Epoch 5/30
11/11 [==============================] - 0s 4ms/step - loss: 66.6096
Epoch 6/30
11/11 [==============================] - 0s 4ms/step - loss: 66.3014
Epoch 7/30
11/11 [==============================] - 0s 4ms/step - loss: 66.6281
Epoch 8/30
11/11 [==============================] - 0s 4ms/step - loss: 66.2579
Epoch 9/30
11/11 [==============================] - 0s 4ms/step - loss: 65.8056
Epoch 10/30
11/11 [==============================] - 0s 4ms/step - loss: 66.0836
Epoch 11/30
11/11 [==============================] - 0s 4ms/step - loss: 66.1505
Epoch 12/30
11/11 [==============================] - 0s 4ms/step - loss: 66.1311
Epoch 13/30
11/11 [==============================] - 0s 4ms/step - loss: 66.4883
Epoch 14/30
11/11 [==============================] - 0s 4ms/step - loss: 65.9097
Epoch 15/30
11/11 [==============================] - 0s 4ms/step - loss: 65.8703
Epoch 16/30
11/11 [==============================] - 0s 4ms/step - loss: 65.8952
Epoch 17/30
11/11 [==============================] - 0s 4ms/step - loss: 66.2336
Epoch 18/30
11/11 [==============================] - 0s 4ms/step - loss: 66.1128
Epoch 19/30
11/11 [==============================] - 0s 4ms/step - loss: 65.9504
Epoch 20/30
11/11 [==============================] - 0s 5ms/step - loss: 66.2385
Epoch 21/30
11/11 [==============================] - 0s 4ms/step - loss: 66.2303
Epoch 22/30
11/11 [==============================] - 0s 5ms/step - loss: 66.2653
Epoch 23/30
11/11 [==============================] - 0s 4ms/step - loss: 65.9358
Epoch 24/30
11/11 [==============================] - 0s 5ms/step - loss: 66.0690
Epoch 25/30
11/11 [==============================] - 0s 4ms/step - loss: 65.8511
Epoch 26/30
11/11 [==============================] - 0s 5ms/step - loss: 65.9699
Epoch 27/30
11/11 [==============================] - 0s 4ms/step - loss: 66.4178
Epoch 28/30
11/11 [==============================] - 0s 4ms/step - loss: 66.2024
Epoch 29/30
11/11 [==============================] - 0s 4ms/step - loss: 66.1910
Epoch 30/30
11/11 [==============================] - 0s 4ms/step - loss: 66.0605
Epoch 1/30
10/10 [==============================] - 2s 5ms/step - loss: 8.9292
Epoch 2/30
10/10 [==============================] - 0s 4ms/step - loss: 6.9470
Epoch 3/30
10/10 [==============================] - 0s 4ms/step - loss: 5.8643
Epoch 4/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6360
Epoch 5/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6860
Epoch 6/30
10/10 [==============================] - 0s 4ms/step - loss: 5.7387
Epoch 7/30
10/10 [==============================] - 0s 5ms/step - loss: 5.7698
Epoch 8/30
10/10 [==============================] - 0s 4ms/step - loss: 5.7612
Epoch 9/30
10/10 [==============================] - 0s 4ms/step - loss: 5.7114
Epoch 10/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6464
Epoch 11/30
10/10 [==============================] - 0s 4ms/step - loss: 5.7188
Epoch 12/30
10/10 [==============================] - 0s 4ms/step - loss: 5.7842
Epoch 13/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6384
Epoch 14/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6265
Epoch 15/30
10/10 [==============================] - 0s 4ms/step - loss: 5.7127
Epoch 16/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6404
Epoch 17/30
10/10 [==============================] - 0s 4ms/step - loss: 5.7008
Epoch 18/30
10/10 [==============================] - 0s 4ms/step - loss: 5.8206
Epoch 19/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6586
Epoch 20/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6355
Epoch 21/30
10/10 [==============================] - 0s 4ms/step - loss: 5.7620
Epoch 22/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6820
Epoch 23/30
10/10 [==============================] - 0s 4ms/step - loss: 5.7225
Epoch 24/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6736
Epoch 25/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6096
Epoch 26/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6549
Epoch 27/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6557
Epoch 28/30
10/10 [==============================] - 0s 4ms/step - loss: 5.6216
Epoch 29/30
10/10 [==============================] - 0s 4ms/step - loss: 5.7134
Epoch 30/30
10/10 [==============================] - 0s 4ms/step - loss: 5.5614
Epoch 1/30
14/14 [==============================] - 2s 5ms/step - loss: 12.3022
Epoch 2/30
14/14 [==============================] - 0s 5ms/step - loss: 10.7910
Epoch 3/30
14/14 [==============================] - 0s 5ms/step - loss: 10.6892
Epoch 4/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5773
Epoch 5/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5843
Epoch 6/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5515
Epoch 7/30
14/14 [==============================] - 0s 4ms/step - loss: 10.6324
Epoch 8/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5889
Epoch 9/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5843
Epoch 10/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5757
Epoch 11/30
14/14 [==============================] - 0s 5ms/step - loss: 10.4919
Epoch 12/30
14/14 [==============================] - 0s 4ms/step - loss: 10.3575
Epoch 13/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5638
Epoch 14/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5706
Epoch 15/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5260
Epoch 16/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5173
Epoch 17/30
14/14 [==============================] - 0s 4ms/step - loss: 10.3694
Epoch 18/30
14/14 [==============================] - 0s 4ms/step - loss: 10.4736
Epoch 19/30
14/14 [==============================] - 0s 4ms/step - loss: 10.3999
Epoch 20/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5394
Epoch 21/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5292
Epoch 22/30
14/14 [==============================] - 0s 4ms/step - loss: 10.4713
Epoch 23/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5423
Epoch 24/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5402
Epoch 25/30
14/14 [==============================] - 0s 5ms/step - loss: 10.5475
Epoch 26/30
14/14 [==============================] - 0s 4ms/step - loss: 10.4522
Epoch 27/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5995
Epoch 28/30
14/14 [==============================] - 0s 4ms/step - loss: 10.5846
Epoch 29/30
14/14 [==============================] - 0s 4ms/step - loss: 10.4304
Epoch 30/30
14/14 [==============================] - 0s 4ms/step - loss: 10.4471
Epoch 1/30
26/26 [==============================] - 2s 5ms/step - loss: 59.2835
Epoch 2/30
26/26 [==============================] - 0s 4ms/step - loss: 36.7836
Epoch 3/30
26/26 [==============================] - 0s 4ms/step - loss: 23.8981
Epoch 4/30
26/26 [==============================] - 0s 4ms/step - loss: 22.5529
Epoch 5/30
26/26 [==============================] - 0s 4ms/step - loss: 22.6094
Epoch 6/30
26/26 [==============================] - 0s 5ms/step - loss: 22.5058
Epoch 7/30
26/26 [==============================] - 0s 4ms/step - loss: 22.5513
Epoch 8/30
26/26 [==============================] - 0s 4ms/step - loss: 22.6628
Epoch 9/30
26/26 [==============================] - 0s 4ms/step - loss: 22.2868
Epoch 10/30
26/26 [==============================] - 0s 4ms/step - loss: 22.2767
Epoch 11/30
26/26 [==============================] - 0s 4ms/step - loss: 22.4944
Epoch 12/30
26/26 [==============================] - 0s 4ms/step - loss: 22.2507
Epoch 13/30
26/26 [==============================] - 0s 4ms/step - loss: 22.5445
Epoch 14/30
26/26 [==============================] - 0s 4ms/step - loss: 22.2991
Epoch 15/30
26/26 [==============================] - 0s 4ms/step - loss: 22.3727
Epoch 16/30
26/26 [==============================] - 0s 4ms/step - loss: 22.2287
Epoch 17/30
26/26 [==============================] - 0s 4ms/step - loss: 22.4616
Epoch 18/30
26/26 [==============================] - 0s 4ms/step - loss: 22.3937
Epoch 19/30
26/26 [==============================] - 0s 4ms/step - loss: 22.3577
Epoch 20/30
26/26 [==============================] - 0s 4ms/step - loss: 22.4429
Epoch 21/30
26/26 [==============================] - 0s 4ms/step - loss: 22.5724
Epoch 22/30
26/26 [==============================] - 0s 4ms/step - loss: 22.3399
Epoch 23/30
26/26 [==============================] - 0s 4ms/step - loss: 22.1661
Epoch 24/30
26/26 [==============================] - 0s 4ms/step - loss: 22.3228
Epoch 25/30
26/26 [==============================] - 0s 4ms/step - loss: 22.3715
Epoch 26/30
26/26 [==============================] - 0s 4ms/step - loss: 22.1556
Epoch 27/30
26/26 [==============================] - 0s 4ms/step - loss: 22.4790
Epoch 28/30
26/26 [==============================] - 0s 4ms/step - loss: 22.0194
Epoch 29/30
26/26 [==============================] - 0s 4ms/step - loss: 22.1329
Epoch 30/30
26/26 [==============================] - 0s 4ms/step - loss: 22.2931
#### REQ8-6). Plot the pulls forecast
In [ ]:
#Add your code for requirement 8.6 in this cell
#### REQ8-7). Plot the commits forecast
In [ ]:
#Add your code for requirement 8.7 in this cell
#### REQ8-8). Plot the branches forecast
In [ ]:
#Add your code for requirement 8.8 in this cell
#### REQ8-9). Plot the contributors forecast
In [ ]:
#Add your code for requirement 8.9 in this cell
#### REQ8-10). Plot the releases forecast
In [ ]:
#Add your code for requirement 8.10 in this cell


## REQUIREMENT 8 (STATSMODEL) - Use StatsModel package to forecast the following for every repository
In [ ]:
import statsmodels.api as sm
In [ ]:
def plot_arima(df):
    """Fit an AR(1) model to ``df['value']`` and plot actual vs fitted.

    Parameters
    ----------
    df : pandas.DataFrame
        Must contain a numeric ``value`` column. A ``forecast`` column
        with the in-sample fitted values is added to it in place.
    """
    observations = np.asarray(df['value'])
    # order=(1, 0, 0): one autoregressive lag, no differencing, no MA term.
    fitted_model = sm.tsa.ARIMA(observations, order=(1, 0, 0)).fit()
    df['forecast'] = fitted_model.fittedvalues
    df[['value', 'forecast']].plot(figsize=(16, 12))
#### REQ8-1). The day of the week maximum number of issues created
In [ ]:
def s1(reponame):
    """Build the weekly-maximum issue-creation series for one repository.

    Parameters
    ----------
    reponame : str
        Repository name to match against the 'repo' column of the
        module-level ``dfs`` frame.  NOTE(review): this function relies on
        the global ``dfs`` built earlier in the notebook — confirm it is
        populated before calling.

    Returns
    -------
    pandas.DataFrame
        Columns: 'timestamp' (the modal created_at per week),
        'value' (issue count), and 'time' (float days since the first
        timestamp), suitable for plot_arima().
    """
    repo_issues = dfs[dfs['repo'] == reponame]

    # describe() on the grouped column yields per-group stats: 'top' is the
    # modal created_at value and 'count' the number of issues in the group.
    stats = repo_issues.groupby('created_at')['created_at'].describe()

    daily = stats[['top', 'count']].copy()
    daily.columns = ['ds', 'y']
    daily['ds_original'] = daily['ds']

    # Shift one week back so the W-MON grouper buckets each point into the
    # week it belongs to (same trick as the original code).
    daily['ds_new'] = pd.to_datetime(daily['ds']) - pd.to_timedelta(7, unit='d')

    # Keep, per week, the row(s) with the maximum issue count.
    weekly_max = (daily.reset_index()
                       .groupby([pd.Grouper(key='ds_new', freq='W-MON')])
                       .apply(lambda g: g.loc[g.y == g.y.max(),
                                              ['ds_original', 'y']]))

    # .copy() avoids SettingWithCopy warnings from the renames below
    # (the original renamed a slice in place).
    series = weekly_max[['ds_original', 'y']].copy()
    series.columns = ['timestamp', 'value']

    # Vectorized day offsets instead of the original nested list
    # comprehension over Python datetime objects.
    timestamps = pd.to_datetime(series['timestamp'])
    first_day = timestamps.min()
    series['time'] = (timestamps - first_day).dt.days.astype(float)
    return series
In [489]:
# REQ8-1: ARIMA forecast of the weekly-max created-issues series,
# one plot per tracked repository (first five unique repos).
for repo_name in dfs['repo'].unique()[:5]:
    plot_arima(s1(repo_name))
CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  1.00231D+00    |proj g|=  6.61249D-03

At iterate    5    f=  1.00230D+00    |proj g|=  4.44089D-08

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      5      7      1     0     0   4.441D-08   1.002D+00
  F =   1.0023012096178279     

CONVERGENCE: REL_REDUCTION_OF_F_<=_FACTR*EPSMCH             
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  1.49041D+00    |proj g|=  3.77787D-04

At iterate    5    f=  1.49041D+00    |proj g|=  2.22045D-08

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      5      7      1     0     0   2.220D-08   1.490D+00
  F =   1.4904081681484020     

CONVERGENCE: REL_REDUCTION_OF_F_<=_FACTR*EPSMCH             
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  1.21027D+00    |proj g|=  9.99978D-04

At iterate    5    f=  1.21027D+00    |proj g|=  8.88178D-08

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      6      8      1     0     0   2.220D-08   1.210D+00
  F =   1.2102729075654812     

CONVERGENCE: REL_REDUCTION_OF_F_<=_FACTR*EPSMCH             
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  2.72903D+00    |proj g|=  3.14415D-04

At iterate    5    f=  2.72902D+00    |proj g|=  0.00000D+00

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      5      7      1     0     0   0.000D+00   2.729D+00
  F =   2.7290231634489941     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  2.48925D+00    |proj g|=  3.55849D-04

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      4      6      1     0     0   0.000D+00   2.489D+00
  F =   2.4892494745459945     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  2.28527D+00    |proj g|=  9.82636D-04

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      4      6      1     0     0   0.000D+00   2.285D+00
  F =   2.2852653783830950     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  2.36152D+00    |proj g|=  4.69003D-04

At iterate    5    f=  2.36152D+00    |proj g|=  0.00000D+00

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      5      7      1     0     0   0.000D+00   2.362D+00
  F =   2.3615240105152551     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  2.48765D+00    |proj g|=  6.35492D-05

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      4      6      1     0     0   0.000D+00   2.488D+00
  F =   2.4876541128645582     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  2.97812D+00    |proj g|=  1.41442D-04

At iterate    5    f=  2.97812D+00    |proj g|=  4.44089D-08

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      6      8      1     0     0   0.000D+00   2.978D+00
  F =   2.9781200341588168     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  2.47522D+00    |proj g|=  2.19376D-03

At iterate    5    f=  2.47515D+00    |proj g|=  4.44089D-08

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
#### REQ8-2). The day of the week maximum number of issues closed
In [491]:
# REQ8-2: ARIMA forecast of the weekly-max closed-issues series,
# one plot per tracked repository (first five unique repos).
for repo_name in dfs['repo'].unique()[:5]:
    plot_arima(k2(repo_name))
#### REQ8-3). The month of the year that has maximum number of issues closed
In [492]:
# REQ8-3: ARIMA forecast of the monthly-max closed-issues series,
# one plot per tracked repository (first five unique repos).
for repo_name in dfs['repo'].unique()[:5]:
    plot_arima(k3(repo_name))
                  ds_orig    y
ds_new                        
2018-12-31 1   2018-11-01  154
2019-12-31 8   2019-06-01  148
2020-12-31 19  2020-05-01  135
2021-12-31 31  2021-05-01  204
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      6     12      1     0     0   0.000D+00   2.475D+00
  F =   2.4751455753818767     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  1.19871D+00    |proj g|=  8.25140D-03

At iterate    5    f=  1.19862D+00    |proj g|=  8.88178D-08

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      6      8      1     0     0   2.220D-08   1.199D+00
  F =   1.1986245378645042     

CONVERGENCE: REL_REDUCTION_OF_F_<=_FACTR*EPSMCH             
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  1.38866D+00    |proj g|=  6.55163D-03

At iterate    5    f=  1.38841D+00    |proj g|=  2.22045D-08

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      5     11      1     0     0   2.220D-08   1.388D+00
  F =   1.3884128384952434     

CONVERGENCE: REL_REDUCTION_OF_F_<=_FACTR*EPSMCH             
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  1.24257D+00    |proj g|=  1.00990D-03

At iterate    5    f=  1.24257D+00    |proj g|=  0.00000D+00

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      5      7      1     0     0   0.000D+00   1.243D+00
  F =   1.2425681160610045     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  2.58057D+00    |proj g|=  5.04325D-03

At iterate    5    f=  2.58027D+00    |proj g|=  7.28306D-06

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      7     10      1     0     0   4.441D-08   2.580D+00
  F =   2.5802691455749467     

CONVERGENCE: REL_REDUCTION_OF_F_<=_FACTR*EPSMCH             
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  2.51561D+00    |proj g|=  1.55693D-03

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      4      7      1     0     0   0.000D+00   2.516D+00
  F =   2.5155687793120496     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  3.60167D+00    |proj g|=  3.60156D-05

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      3      5      1     0     0   0.000D+00   3.602D+00
  F =   3.6016730858487187     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  2.51050D+00    |proj g|=  7.31504D-04

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      4      6      1     0     0   0.000D+00   2.510D+00
  F =   2.5104966502542587     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  2.71668D+00    |proj g|=  8.18456D-05

At iterate    5    f=  2.71668D+00    |proj g|=  0.00000D+00

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      5      8      1     0     0   0.000D+00   2.717D+00
  F =   2.7166830815534944     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  3.24032D+00    |proj g|=  7.36078D-04

At iterate    5    f=  3.24032D+00    |proj g|=  6.35048D-06

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      7      9      1     0     0   0.000D+00   3.240D+00
  F =   3.2403197250535545     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

                  ds_orig    y
ds_new                        
2016-12-31 3   2016-09-01   21
2017-12-31 15  2017-09-01   28
2018-12-31 29  2018-11-01  219
2019-12-31 31  2019-01-01   37
2020-12-31 48  2020-07-01   34
2021-12-31 56  2021-06-01   27
                  ds_orig   y
ds_new                       
2017-12-31 4   2017-06-01  40
2018-12-31 13  2018-03-01  43
2019-12-31 24  2019-02-01  46
2020-12-31 44  2020-11-01  44
2021-12-31 50  2021-06-01  10
                  ds_orig   y
ds_new                       
2014-12-31 4   2014-10-01  29
           6   2014-12-01  29
2015-12-31 16  2015-10-01  76
2016-12-31 21  2016-03-01  52
2017-12-31 31  2017-01-01  42
2018-12-31 45  2018-03-01  15
2019-12-31 47  2019-08-01  12
2020-12-31 56  2020-05-01   7
2021-12-31 64  2021-01-01   7
           69  2021-06-01   7
                  ds_orig    y
ds_new                        
2018-12-31 1   2018-11-01  178
2019-12-31 13  2019-11-01  193
2020-12-31 16  2020-02-01  299
2021-12-31 31  2021-05-01  291
#### REQ8-4). Plot the created issues forecast
In [493]:
# REQ8-4: ARIMA forecast of the created-issues series,
# one plot per tracked repository (first five unique repos).
for repo_name in dfs['repo'].unique()[:5]:
    plot_arima(k4(repo_name))
At iterate    0    f=  6.34853D+00    |proj g|=  1.17552D-01

At iterate    5    f=  4.58310D+00    |proj g|=  5.99076D-04

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      8     16      1     0     0   0.000D+00   4.583D+00
  F =   4.5830845630228509     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  5.66381D+00    |proj g|=  1.02003D-02

At iterate    5    f=  5.66327D+00    |proj g|=  9.73017D-03

At iterate   10    f=  5.65915D+00    |proj g|=  3.07665D-04

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2     13     16      1     0     0   0.000D+00   5.659D+00
  F =   5.6591516192757583     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  5.24686D+00    |proj g|=  9.98161D-02

At iterate    5    f=  4.01263D+00    |proj g|=  4.80771D-04

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      8     14      1     0     0   0.000D+00   4.013D+00
  F =   4.0126291443313606     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  4.30622D+00    |proj g|=  2.98853D-02

At iterate    5    f=  4.30209D+00    |proj g|=  1.06113D-02

At iterate   10    f=  4.30139D+00    |proj g|=  4.44089D-07

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2     11     14      1     0     0   0.000D+00   4.301D+00
  F =   4.3013871465137417     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  5.58798D+00    |proj g|=  1.04811D-01

At iterate    5    f=  5.38783D+00    |proj g|=  5.12507D-02

At iterate   10    f=  5.37755D+00    |proj g|=  8.88178D-08

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2     13     23      1     0     0   0.000D+00   5.378D+00
  F =   5.3775527819568829     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  2.40331D+00    |proj g|=  2.27240D-04

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      4      6      1     0     0   0.000D+00   2.403D+00
  F =   2.4033083930313026     

CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL            
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  1.11707D+00    |proj g|=  1.14739D-03

At iterate    5    f=  1.11707D+00    |proj g|=  4.44089D-08

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      6      9      1     0     0   2.220D-08   1.117D+00
  F =   1.1170656984462288     

CONVERGENCE: REL_REDUCTION_OF_F_<=_FACTR*EPSMCH             
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  1.49041D+00    |proj g|=  3.77787D-04

At iterate    5    f=  1.49041D+00    |proj g|=  2.22045D-08

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      5      7      1     0     0   2.220D-08   1.490D+00
  F =   1.4904081681484020     

CONVERGENCE: REL_REDUCTION_OF_F_<=_FACTR*EPSMCH             
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  1.21027D+00    |proj g|=  9.99978D-04

At iterate    5    f=  1.21027D+00    |proj g|=  8.88178D-08

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N    Tit     Tnf  Tnint  Skip  Nact     Projg        F
    2      6      8      1     0     0   2.220D-08   1.210D+00
  F =   1.2102729075654812     

CONVERGENCE: REL_REDUCTION_OF_F_<=_FACTR*EPSMCH             
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 2.220D-16
 N =            2     M =           12

At X0         0 variables are exactly at the bounds

At iterate    0    f=  2.72903D+00    |proj g|=  3.14415D-04

At iterate    5    f=  2.72902D+00    |proj g|=  0.00000D+00

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
#### REQ8-5). Plot the closed issues forecast
In [494]:
# REQ8-5: ARIMA forecast of the closed-issues series,
# one plot per tracked repository (first five unique repos).
for repo_name in dfs['repo'].unique()[:5]:
    plot_arima(k5(repo_name))
#### REQ8-6). Plot the pulls forecast
In [ ]:
#Add your code for requirement 8.6 in this cell
#### REQ8-7). Plot the commits forecast
In [ ]:
#Add your code for requirement 8.7 in this cell
#### REQ8-8). Plot the branches forecast
In [ ]:
#Add your code for requirement 8.8 in this cell
#### REQ8-9). Plot the contributors forecast
In [ ]:
#Add your code for requirement 8.9 in this cell
#### REQ8-10). Plot the releases forecast
In [ ]:
#Add your code for requirement 8.10 in this cell